Compare commits

4 Commits
main ... fuse

SHA1        Message                   Date                         Signed-off-by
ab9f2cbc09  Add fuse callbacks        2025-06-07 11:00:33 +01:00   Nikolaos Karaolidis <nick@karaolidis.com>
ab3cb8bd4e  Add fuse access checks    2025-06-06 12:46:03 +01:00   Nikolaos Karaolidis <nick@karaolidis.com>
3cad7cce61  Update README             2025-06-05 23:37:35 +01:00   Nikolaos Karaolidis <nick@karaolidis.com>
d1d49731ea  Add fuser implementation  2025-06-05 23:24:26 +01:00   Nikolaos Karaolidis <nick@karaolidis.com>
49 changed files with 1857 additions and 411 deletions

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "019256af8ccf4fc3f1ad6daa0ed3bc945f141020837732c9cf680fbcc438c6a8"
}
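The UNNEST-based insert above is the batch form used throughout this change: the two text[] parameters are zipped element-wise into (user_name, group_name) rows, so one round trip can attach a user to any number of groups. A minimal sketch of how the parameters might be bound from Rust; the pool variable and the example names are assumptions, and the array-duplication trick mirrors the call sites later in this diff:

    // Attach one user to several groups in a single statement.
    // UNNEST($1, $2) pairs the arrays positionally: element i of each array forms row i.
    let user = "alice".to_string();                        // assumed example data
    let groups = vec!["admins".to_string(), "users".to_string()];
    let user_names = vec![user.clone(); groups.len()];     // repeat the user once per group
    sqlx::query!(
        r#"
        INSERT INTO glyph_users_groups (user_name, group_name)
        SELECT * FROM UNNEST($1::text[], $2::text[])
        "#,
        &user_names,
        &groups
    )
    .execute(&pool)                                        // `pool: sqlx::PgPool` assumed in scope
    .await?;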

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n GROUP BY g.name\n ",
"query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n ",
"describe": {
"columns": [
{
@@ -22,5 +22,5 @@
null
]
},
"hash": "e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8"
"hash": "1493410c6cf4f7a4cadadf5321f42ad265282d3072eded0b9caaa3dc81ab8b45"
}
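This is the main query rewrite in the branch: the old LEFT JOIN plus array_agg shape is replaced by a correlated ARRAY(SELECT ...) subquery. Presumably the point is that, for a group with no members, the join form aggregates the single NULL produced by the LEFT JOIN, so the column comes back as {NULL} (the COALESCE never fires because {NULL} is not NULL), whereas the correlated form returns an empty array and drops the GROUP BY entirely. An illustrative comparison of the two shapes (constants only, not code taken from the diff):

    // Old shape: requires GROUP BY and yields ARRAY[NULL] for a memberless group.
    const GROUP_USERS_OLD: &str = r#"
        SELECT
            g.name,
            COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS users
        FROM groups g
        LEFT JOIN users_groups ug ON g.name = ug.group_name
        GROUP BY g.name
    "#;

    // New shape: flat query, and a memberless group yields an empty array.
    const GROUP_USERS_NEW: &str = r#"
        SELECT
            g.name,
            ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS users
        FROM glyph_groups g
    "#;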

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "1bf35e562ef97408038259aca7b36f719b5f5697efbce538bf3f4eefec6b8d16"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_groups\n WHERE name <> ALL($1)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": []
},
"hash": "244c0ca382a0bd8040667c05b457d2b55f15670d696faba7d57f42090b040378"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM groups\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM users\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "3613d8c9b991fb1d39ad99ed36778e5ba9933ca3cf0f064ae5f139cee1cdad42"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "364208fb0f4ef56a2ea755481a0d994fed588ff998524521936306e772ae8dce"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM users\n WHERE name = ANY($1)\n ",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_groups\n WHERE name = ANY($1)\n ",
"describe": {
"columns": [
{
@@ -18,5 +18,5 @@
null
]
},
"hash": "090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822"
"hash": "3a6bd1951cac5c82e67fa3610aa90984810cb3e5d596ec0f864ae5aa2631816a"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT name, display_name, password, email, disabled, image\n FROM users\n WHERE name = $1\n ",
"query": "\n SELECT name, display_name, password, email, disabled, picture\n FROM glyph_users\n WHERE name = $1\n ",
"describe": {
"columns": [
{
@@ -30,7 +30,7 @@
},
{
"ordinal": 5,
"name": "image",
"name": "picture",
"type_info": "Text"
}
],
@@ -48,5 +48,5 @@
true
]
},
"hash": "74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166"
"hash": "538bac60e4dff5453e9d69f8b94dd623844f4ab89b2963c625f6c47e2dca9c02"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "5d457b24e090dd2d3be512de08706387e62e4ec7997f60c3958572cce8985c27"
}

View File

@@ -1,19 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n image = EXCLUDED.image\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "6830e994b974d76fdcf51cf0e540ce0fa79b58f8eaf0c7ecb1a22e6fc1ebf505"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_groups\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "6a626592b97a77a5804125753527fc9451fa39565b363d2e54ee06b2f36b177f"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users\n WHERE name <> ALL($1)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray"
]
},
"nullable": []
},
"hash": "8f1394702f150d3642129fcca8417e5e01911c13191be44339dc36a6c4978db2"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"TextArray",
"TextArray"
]
},
"nullable": []
},
"hash": "91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "a15ceb9e5c596ce9639a79436e9e642c65c2cac61054b2fdaa7190c0e8acf0d0"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM users_groups\n WHERE group_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO groups (name) VALUES ($1)",
"query": "INSERT INTO glyph_groups (name) VALUES ($1)",
"describe": {
"columns": [],
"parameters": {
@@ -10,5 +10,5 @@
},
"nullable": []
},
"hash": "b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2"
"hash": "af519bc617842e3d48a976f9aa4b107b2690b0a259ee02b133985e3eb00a4165"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n WHERE u.name = $1\n GROUP BY u.name, u.email, u.disabled, u.image\n ",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n WHERE u.name = $1\n ",
"describe": {
"columns": [
{
@@ -30,7 +30,7 @@
},
{
"ordinal": 5,
"name": "image",
"name": "picture",
"type_info": "Text"
},
{
@@ -54,5 +54,5 @@
null
]
},
"hash": "9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982"
"hash": "b0b3c6daf78b7a04e75b781d997b61a56fbc8005fe1e419febb87b5247f44ade"
}

View File

@@ -1,19 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n GROUP BY u.name, u.email, u.disabled, u.image\n ",
"query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n ",
"describe": {
"columns": [
{
@@ -30,7 +30,7 @@
},
{
"ordinal": 5,
"name": "image",
"name": "picture",
"type_info": "Text"
},
{
@@ -52,5 +52,5 @@
null
]
},
"hash": "95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4"
"hash": "bc5847efd81251c0e4b524b84f2485ebbd0cd0a813d364815858add87d0e07a2"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users_groups\n WHERE group_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "cd8831c93f8714f5242bf0b3dae6240a46b37d5d163414bd739fa0025b6de0a5"
}

View File

@@ -0,0 +1,19 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool",
"Text"
]
},
"nullable": []
},
"hash": "ce1fade2aaf62ce5a3b1448d92517d73533dd9b098d4dfb679a0e77e4a42b3e6"
}

View File

@@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Text",
"Bool"
]
},
"nullable": []
},
"hash": "e1f9c5d85717bdf150dd2352196d7477db1f00a8776777702227a3a6ef8a8c4a"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM glyph_users\n WHERE name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "e355e946faf174f24ffb3bdb4cf3e6f3047431316b1e6ef752e2c33a0e0a0c07"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM groups\n WHERE name = ANY($1)\n ",
"query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_users\n WHERE name = ANY($1)\n ",
"describe": {
"columns": [
{
@@ -18,5 +18,5 @@
null
]
},
"hash": "9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8"
"hash": "e53bf6042de37af5560c1861dc4056827cf4ea87449f209dac15ba5d6bfc1704"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM users_groups\n WHERE user_name = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT name\n FROM groups\n WHERE name = $1\n ",
"query": "\n SELECT name\n FROM glyph_groups\n WHERE name = $1\n ",
"describe": {
"columns": [
{
@@ -18,5 +18,5 @@
false
]
},
"hash": "19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9"
"hash": "ee2e54ee09eb411a441931e5f9070f9216093191fb46f25cb0b237ef95c92c5d"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n WHERE g.name = $1\n GROUP BY g.name\n ",
"query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n WHERE g.name = $1\n ",
"describe": {
"columns": [
{
@@ -24,5 +24,5 @@
null
]
},
"hash": "52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0"
"hash": "f769fe0d94fe52430004b327c1afe839d61eed8b53836bf4f091c9c56fa1cf17"
}

Cargo.lock (generated, 3 changed lines)
View File

@@ -1250,15 +1250,16 @@ dependencies = [
"axum-extra",
"clap",
"fuser",
"libc",
"log",
"log4rs",
"non-empty-string",
"openidconnect",
"parking_lot",
"passwords",
"redis 0.31.0",
"redis-macros",
"serde",
"serde_json",
"serde_yaml",
"sqlx",
"time",

View File

@@ -20,18 +20,19 @@ async-session = "3.0.0"
axum = { version = "0.8.4", features = ["macros"] }
axum-extra = { version = "0.10.1", features = ["typed-header"] }
clap = { version = "4.5.39", features = ["derive"] }
fuser = "0.15.1"
fuser = { version = "0.15.1", features = ["abi-7-31"] }
libc = "0.2.172"
log = "0.4.27"
log4rs = "1.3.0"
non-empty-string = { version = "0.2.6", features = ["serde"] }
openidconnect = { version = "4.0.0", features = ["reqwest"] }
parking_lot = "0.12.4"
passwords = "3.1.16"
redis = { version = "0.31.0", features = ["tokio-comp"] }
redis-macros = "0.5.4"
serde = "1.0.219"
serde_json = "1.0.140"
serde_yaml = "0.9.34"
sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "time", "uuid"] }
time = { version = "0.3.41", features = ["serde"] }
tokio = { version = "1.45.1", features = ["rt-multi-thread", "process"] }
tokio = { version = "1.45.1", features = ["rt-multi-thread", "process", "signal"] }
uuid = { version = "1.17.0", features = ["serde"] }

View File

@@ -1,6 +1,10 @@
# glyph
*Glyph* is an Authelia user file database manager. Because files are light but unwieldy, and LDAP is convenient but complex.
*Glyph* was an Authelia user file database manager. Because files were light but unwieldy, and LDAP was convenient but complex.
Now, it's turned into a diabolical FUSE MITM contraption that should have never been built. It pretends to be a one-file filesystem (ඞ) that can be read and written to by Authelia, but it actually stores user and group data in a PostgreSQL database.
Do not use this under any circumstances. It is not secure, it is not stable, and it is not created in God's image. It is a joke, and an unfunny one at that.
## Development
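A minimal sketch of the round trip the README is describing, using the two helpers added further down in this diff (models::authelia::Users); pg_pool is assumed to be an sqlx::PgPool and error handling is elided:

    // Database -> YAML: the contents served to Authelia when it reads the single
    // users file inside the FUSE mount (this is also how state.rs seeds the file).
    let contents = models::authelia::Users::to_fuse(&pg_pool).await?;

    // YAML -> database: what runs when Authelia writes that file back; the YAML is
    // parsed, every listed user is upserted, and users/groups no longer present are deleted.
    models::authelia::Users::from_fuse(&pg_pool, &contents);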

View File

@@ -33,6 +33,25 @@
treefmt = inputs.treefmt-nix.lib.evalModule pkgs ./treefmt.nix;
in
{
packages.default = pkgs.rustPlatform.buildRustPackage {
pname = "glyph";
version = "0.1.0";
src = ./.;
cargoLock = {
lockFile = ./Cargo.lock;
};
SQLX_OFFLINE = true;
nativeBuildInputs = with pkgs; [
pkg-config
];
buildInputs = with pkgs; [
fuse3
];
};
devShells.default = pkgs.mkShell {
packages = with pkgs; [
cargo
@@ -42,6 +61,8 @@
cargo-udeps
cargo-outdated
sqlx-cli
fuse3
pkg-config
];
};

View File

@@ -1,29 +1,29 @@
CREATE TABLE IF NOT EXISTS users (
CREATE TABLE IF NOT EXISTS glyph_users (
name TEXT PRIMARY KEY,
display_name TEXT NOT NULL,
password TEXT NOT NULL,
email TEXT NOT NULL UNIQUE,
disabled BOOLEAN NOT NULL,
image TEXT,
picture TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE TABLE IF NOT EXISTS groups (
CREATE TABLE IF NOT EXISTS glyph_groups (
name TEXT PRIMARY KEY,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE TABLE IF NOT EXISTS users_groups (
CREATE TABLE IF NOT EXISTS glyph_users_groups (
user_name TEXT NOT NULL,
group_name TEXT NOT NULL,
PRIMARY KEY (user_name, group_name),
FOREIGN KEY (user_name) REFERENCES users(name) ON DELETE CASCADE,
FOREIGN KEY (group_name) REFERENCES groups(name) ON DELETE CASCADE
FOREIGN KEY (user_name) REFERENCES glyph_users(name) ON DELETE CASCADE,
FOREIGN KEY (group_name) REFERENCES glyph_groups(name) ON DELETE CASCADE
);
CREATE OR REPLACE FUNCTION update_timestamp()
CREATE OR REPLACE FUNCTION glyph_update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
@@ -31,26 +31,26 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_users_timestamp
BEFORE UPDATE ON users
CREATE OR REPLACE TRIGGER glyph_update_users_timestamp
BEFORE UPDATE ON glyph_users
FOR EACH ROW
EXECUTE FUNCTION update_timestamp();
EXECUTE FUNCTION glyph_update_timestamp();
CREATE TRIGGER update_groups_timestamp
BEFORE UPDATE ON groups
CREATE OR REPLACE TRIGGER glyph_update_groups_timestamp
BEFORE UPDATE ON glyph_groups
FOR EACH ROW
EXECUTE FUNCTION update_timestamp();
EXECUTE FUNCTION glyph_update_timestamp();
CREATE OR REPLACE FUNCTION update_users_groups_timestamp()
CREATE OR REPLACE FUNCTION glyph_update_users_groups_timestamp()
RETURNS TRIGGER AS $$
BEGIN
UPDATE users SET updated_at = NOW() WHERE name = NEW.user_name;
UPDATE groups SET updated_at = NOW() WHERE name = NEW.group_name;
UPDATE glyph_users SET updated_at = NOW() WHERE name = NEW.user_name;
UPDATE glyph_groups SET updated_at = NOW() WHERE name = NEW.group_name;
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_users_groups_timestamp
AFTER INSERT OR DELETE ON users_groups
CREATE OR REPLACE TRIGGER glyph_update_users_groups_timestamp
AFTER INSERT OR DELETE ON glyph_users_groups
FOR EACH ROW
EXECUTE FUNCTION update_users_groups_timestamp();
EXECUTE FUNCTION glyph_update_users_groups_timestamp();

View File

@@ -1,5 +1,6 @@
use clap::Parser;
use serde::Deserialize;
use sqlx::query;
use std::{
error::Error,
fs,
@@ -7,6 +8,8 @@ use std::{
path::PathBuf,
};
use crate::utils::crypto::hash_password;
#[derive(Clone, Deserialize)]
pub struct ServerConfig {
pub host: String,
@@ -37,8 +40,9 @@ pub struct OAuthConfig {
}
#[derive(Clone, Deserialize)]
pub struct AutheliaConfig {
pub user_database: PathBuf,
pub struct FuseConfig {
pub mount_directory: PathBuf,
pub user_database_name: String,
}
#[derive(Clone, Deserialize)]
@@ -58,13 +62,48 @@ pub struct RedisConfig {
pub database: u8,
}
#[derive(Clone, Deserialize)]
pub struct AdminConfig {
pub name: String,
pub display_name: String,
pub password: String,
pub email: String,
}
impl AdminConfig {
pub async fn upsert(&self, pool: &sqlx::PgPool) -> Result<(), Box<dyn Error + Send + Sync>> {
let password = hash_password(&self.password);
query!(
r#"
INSERT INTO glyph_users (name, display_name, password, email, disabled)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled
"#,
self.name,
self.display_name,
password,
self.email,
false
)
.execute(pool)
.await?;
Ok(())
}
}
#[derive(Clone, Deserialize)]
pub struct Config {
pub server: ServerConfig,
pub oauth: OAuthConfig,
pub authelia: AutheliaConfig,
pub fuse: FuseConfig,
pub postgresql: PostgresqlConfig,
pub redis: RedisConfig,
pub admin: AdminConfig,
}
impl TryFrom<&PathBuf> for Config {
@@ -86,4 +125,7 @@ pub struct Args {
/// Path to the log4rs config file
#[arg(short, long, value_name = "FILE", default_value = "log4rs.yaml")]
pub log_config: PathBuf,
/// Additional arguments to pass to Authelia
#[arg(last = true, num_args = 0.., allow_hyphen_values = true)]
pub passthrough: Vec<String>,
}
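With last = true and allow_hyphen_values, everything after a literal "--" on the command line is collected into passthrough untouched and later handed to the spawned authelia process (see state.rs below). A small sketch of the parsing, with hypothetical paths matching the Kubernetes manifest at the end of this diff:

    // glyph --config /etc/glyph/default.yml -- --config /etc/authelia/configuration.yml
    let args = Args::parse_from([
        "glyph",
        "--config", "/etc/glyph/default.yml",
        "--",
        "--config", "/etc/authelia/configuration.yml",
    ]);
    // args.config      == PathBuf::from("/etc/glyph/default.yml")
    // args.passthrough == ["--config", "/etc/authelia/configuration.yml"]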

src/fuse.rs (new file, 1045 changed lines)

File diff suppressed because it is too large.

View File

@@ -2,6 +2,7 @@
#![allow(clippy::missing_docs_in_private_items)]
mod config;
mod fuse;
mod models;
mod routes;
mod state;
@@ -11,37 +12,29 @@ use axum::serve;
use clap::Parser;
use log::info;
use log4rs::config::Deserializers;
use std::{error::Error, net::SocketAddr};
use std::net::SocketAddr;
use tokio::net::TcpListener;
use config::{Args, Config};
use config::Args;
use state::State;
#[tokio::main]
async fn main() {
let args = Args::parse();
log4rs::init_file(args.log_config, Deserializers::default()).unwrap();
let args: Args = Args::parse();
log4rs::init_file(args.log_config.clone(), Deserializers::default()).unwrap();
let config = Config::try_from(&args.config).unwrap();
let state = State::from_config(config.clone()).await;
let state = State::from_args(args).await;
init(&state).await.unwrap();
let routes = routes::routes(state.clone());
let app = axum::Router::new().nest(&format!("{}/api", state.config.server.subpath), routes);
let routes = routes::routes(state);
let app = axum::Router::new().nest(&format!("{}/api", config.server.subpath), routes);
let addr = SocketAddr::from((config.server.address, config.server.port));
let addr = SocketAddr::from((state.config.server.address, state.config.server.port));
let listener = TcpListener::bind(addr).await.unwrap();
info!("Listening on {}", listener.local_addr().unwrap());
serve(listener, app).await.unwrap();
}
async fn init(state: &State) -> Result<(), Box<dyn Error + Send + Sync>> {
sqlx::migrate!("./migrations")
.run(&state.pg_pool)
serve(listener, app)
.with_graceful_shutdown(utils::shutdown_signal())
.await
.expect("Failed to run migrations");
Ok(())
.unwrap();
}

View File

@@ -1,24 +1,102 @@
use log::warn;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_yaml::Value;
use sqlx::PgPool;
use std::collections::HashMap;
use std::{collections::HashMap, error::Error};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UsersFile {
pub users: HashMap<String, UserFile>,
pub struct Users {
pub users: HashMap<String, User>,
#[serde(flatten)]
pub extra: Option<HashMap<String, Value>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserFile {
pub struct User {
pub displayname: String,
pub password: String,
pub email: Option<String>,
pub disabled: Option<bool>,
pub picture: Option<String>,
pub groups: Option<Vec<String>>,
#[serde(flatten)]
pub extra: Option<HashMap<String, Value>>,
}
impl TryInto<Vec<super::users::UserWithGroups>> for Users {
type Error = Box<dyn Error + Send + Sync>;
fn try_into(self) -> Result<Vec<super::users::UserWithGroups>, Self::Error> {
self.users
.into_iter()
.map(|(name, user)| {
let groups = user.groups.unwrap_or_default();
Ok(super::users::UserWithGroups {
name: name.clone(),
display_name: user.displayname,
password: user.password,
email: user
.email
.ok_or_else(|| format!("User {} is missing an email", &name))?,
disabled: user.disabled.unwrap_or(false),
picture: user.picture,
groups,
})
})
.collect()
}
}
impl Users {
pub fn from_fuse(pool: &PgPool, contents: &str) {
let Ok(users) = serde_yaml::from_str::<Self>(contents) else {
warn!("Failed to parse users from YAML.");
return;
};
let users_with_groups: Vec<super::users::UserWithGroups> = match users.try_into() {
Ok(users) => users,
Err(e) => {
warn!("Failed to convert Users to UserWithGroups: {e}");
return;
}
};
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async {
super::users::UserWithGroups::upsert_many_delete_remaining(pool, &users_with_groups)
.await
.unwrap_or_else(|e| warn!("Failed to upsert users: {e}"));
});
}
pub async fn to_fuse(pool: &PgPool) -> Result<String, Box<dyn Error + Send + Sync>> {
let users_with_groups = super::users::UserWithGroups::select_all(pool).await?;
let users = Self {
users: users_with_groups
.into_iter()
.map(|user| {
(
user.name.clone(),
User {
displayname: user.display_name,
password: user.password,
email: Some(user.email),
disabled: Some(user.disabled),
picture: user.picture,
groups: Some(user.groups),
extra: None,
},
)
})
.collect(),
extra: None,
};
Ok(serde_yaml::to_string(&users)?)
}
}
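One design note on from_fuse above: it is a plain synchronous fn, presumably because the filesystem calls it from fuser's own threads rather than from a tokio task, which is why it builds a throwaway Runtime and block_on()s the upsert instead of awaiting it. The wiring lives in the suppressed fuse.rs, so the hook type sketched here is an assumption based on how state.rs passes Box::new(models::authelia::Users::from_fuse):

    use sqlx::PgPool;

    // Assumed shape of the optional write hook accepted by AutheliaFS::new(...).
    type WriteHook = Box<dyn Fn(&PgPool, &str) + Send + Sync>;

    // Hypothetical call site inside a fuser write/flush handler: pass the full file
    // contents to the hook, which parses the YAML and syncs PostgreSQL.
    fn on_users_file_written(hook: &WriteHook, pool: &PgPool, contents: &str) {
        hook(pool, contents);
    }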

View File

@@ -9,7 +9,7 @@ pub struct Group {
}
impl Group {
pub async fn select_by_name(
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
@@ -17,7 +17,7 @@ impl Group {
Group,
r#"
SELECT name
FROM groups
FROM glyph_groups
WHERE name = $1
"#,
name
@@ -28,13 +28,10 @@ impl Group {
Ok(group)
}
pub async fn delete_by_name(
pool: &PgPool,
name: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
query!(
r#"
DELETE FROM groups
DELETE FROM glyph_groups
WHERE name = $1
"#,
name
@@ -45,14 +42,14 @@ impl Group {
Ok(())
}
pub async fn all_exist_by_names(
pub async fn all_exist(
pool: &PgPool,
names: &[String],
) -> Result<bool, Box<dyn Error + Send + Sync>> {
let row = query!(
r#"
SELECT COUNT(*) AS "count!"
FROM groups
FROM glyph_groups
WHERE name = ANY($1)
"#,
names
@@ -67,20 +64,19 @@ impl Group {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GroupWithUsers {
pub name: String,
#[serde(default)]
pub users: Vec<String>,
}
impl GroupWithUsers {
pub async fn select(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
let groups = query_as!(
GroupWithUsers,
r#"
SELECT
g.name,
COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!"
FROM groups g
LEFT JOIN users_groups ug ON g.name = ug.group_name
GROUP BY g.name
ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
FROM glyph_groups g
"#
)
.fetch_all(pool)
@@ -89,7 +85,7 @@ impl GroupWithUsers {
Ok(groups)
}
pub async fn select_by_name(
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
@@ -98,11 +94,9 @@ impl GroupWithUsers {
r#"
SELECT
g.name,
COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!"
FROM groups g
LEFT JOIN users_groups ug ON g.name = ug.group_name
ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
FROM glyph_groups g
WHERE g.name = $1
GROUP BY g.name
"#,
name
)
@@ -119,7 +113,7 @@ impl GroupWithUsers {
let mut tx = pool.begin().await?;
query!(
r#"INSERT INTO groups (name) VALUES ($1)"#,
r#"INSERT INTO glyph_groups (name) VALUES ($1)"#,
group_with_users.name
)
.execute(&mut *tx)
@@ -127,8 +121,8 @@ impl GroupWithUsers {
query!(
r#"
INSERT INTO users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
&group_with_users.users,
&vec![group_with_users.name.clone(); group_with_users.users.len()]

View File

@@ -19,7 +19,7 @@ impl UsersGroups {
query!(
r#"
DELETE FROM users_groups
DELETE FROM glyph_users_groups
WHERE group_name = $1
"#,
group_name
@@ -29,7 +29,7 @@ impl UsersGroups {
query!(
r#"
INSERT INTO users_groups (user_name, group_name)
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
users,
@@ -50,7 +50,7 @@ impl UsersGroups {
query!(
r#"
DELETE FROM users_groups
DELETE FROM glyph_users_groups
WHERE user_name = $1
"#,
user_name
@@ -60,7 +60,7 @@ impl UsersGroups {
query!(
r#"
INSERT INTO users_groups (user_name, group_name)
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
&vec![user_name.to_string(); groups.len()],

View File

@@ -1,4 +1,4 @@
use std::error::Error;
use std::{collections::HashSet, error::Error};
use serde::{Deserialize, Serialize};
use sqlx::{FromRow, PgPool, query, query_as};
@@ -12,20 +12,20 @@ pub struct User {
#[serde(default)]
pub disabled: bool,
#[serde(default)]
pub image: Option<String>,
pub picture: Option<String>,
}
impl User {
pub async fn select_by_name(
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
let user = query_as!(
User,
r#"
SELECT name, display_name, password, email, disabled, image
FROM users
WHERE name = $1
SELECT name, display_name, password, email, disabled, picture
FROM glyph_users
WHERE name = $1
"#,
name
)
@@ -38,21 +38,21 @@ impl User {
pub async fn upsert(pool: &PgPool, user: &Self) -> Result<(), Box<dyn Error + Send + Sync>> {
query!(
r#"
INSERT INTO users (name, display_name, password, email, disabled, image)
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled,
image = EXCLUDED.image
INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled,
picture = EXCLUDED.picture
"#,
user.name,
user.display_name,
user.password,
user.email,
user.disabled,
user.image
user.picture
)
.execute(pool)
.await?;
@@ -60,14 +60,11 @@ impl User {
Ok(())
}
pub async fn delete_by_name(
pool: &PgPool,
name: &str,
) -> Result<(), Box<dyn Error + Send + Sync>> {
pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
query!(
r#"
DELETE FROM users
WHERE name = $1
DELETE FROM glyph_users
WHERE name = $1
"#,
name
)
@@ -77,15 +74,15 @@ impl User {
Ok(())
}
pub async fn all_exist_by_names(
pub async fn all_exist(
pool: &PgPool,
names: &[String],
) -> Result<bool, Box<dyn Error + Send + Sync>> {
let row = query!(
r#"
SELECT COUNT(*) AS "count!"
FROM users
WHERE name = ANY($1)
SELECT COUNT(*) AS "count!"
FROM glyph_users
WHERE name = ANY($1)
"#,
names
)
@@ -105,26 +102,25 @@ pub struct UserWithGroups {
#[serde(default)]
pub disabled: bool,
#[serde(default)]
pub image: Option<String>,
pub picture: Option<String>,
#[serde(default)]
pub groups: Vec<String>,
}
impl UserWithGroups {
pub async fn select(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
let users = query_as!(
UserWithGroups,
r#"
SELECT
u.name,
u.display_name,
u.password,
u.email,
u.disabled,
u.image,
COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!"
FROM users u
LEFT JOIN users_groups ug ON u.name = ug.user_name
GROUP BY u.name, u.email, u.disabled, u.image
SELECT
u.name,
u.display_name,
u.password,
u.email,
u.disabled,
u.picture,
ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
FROM glyph_users u
"#
)
.fetch_all(pool)
@@ -133,25 +129,23 @@ impl UserWithGroups {
Ok(users)
}
pub async fn select_by_name(
pub async fn select(
pool: &PgPool,
name: &str,
) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
let user = query_as!(
UserWithGroups,
r#"
SELECT
u.name,
u.display_name,
u.password,
u.email,
u.disabled,
u.image,
COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!"
FROM users u
LEFT JOIN users_groups ug ON u.name = ug.user_name
WHERE u.name = $1
GROUP BY u.name, u.email, u.disabled, u.image
SELECT
u.name,
u.display_name,
u.password,
u.email,
u.disabled,
u.picture,
ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
FROM glyph_users u
WHERE u.name = $1
"#,
name
)
@@ -168,23 +162,24 @@ impl UserWithGroups {
let mut tx = pool.begin().await?;
query!(
r#"INSERT INTO users (name, display_name, password, email, disabled, image)
VALUES ($1, $2, $3, $4, $5, $6)
r#"
INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
VALUES ($1, $2, $3, $4, $5, $6)
"#,
user_with_groups.name,
user_with_groups.display_name,
user_with_groups.password,
user_with_groups.email,
user_with_groups.disabled,
user_with_groups.image
user_with_groups.picture
)
.execute(&mut *tx)
.await?;
query!(
r#"
INSERT INTO users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
&user_with_groups.groups,
&vec![user_with_groups.name.clone(); user_with_groups.groups.len()]
@@ -196,4 +191,93 @@ impl UserWithGroups {
Ok(())
}
pub async fn upsert_many_delete_remaining(
pool: &PgPool,
users_with_groups: &[Self],
) -> Result<(), Box<dyn Error + Send + Sync>> {
let mut tx = pool.begin().await?;
for user in users_with_groups {
query!(
r#"
INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (name) DO UPDATE
SET display_name = EXCLUDED.display_name,
password = EXCLUDED.password,
email = EXCLUDED.email,
disabled = EXCLUDED.disabled,
picture = EXCLUDED.picture
"#,
user.name,
user.display_name,
user.password,
user.email,
user.disabled,
user.picture
)
.execute(&mut *tx)
.await?;
query!(
r#"
DELETE FROM glyph_users_groups
WHERE user_name = $1
"#,
user.name
)
.execute(&mut *tx)
.await?;
if !user.groups.is_empty() {
query!(
r#"
INSERT INTO glyph_users_groups (user_name, group_name)
SELECT * FROM UNNEST($1::text[], $2::text[])
"#,
&user.groups,
&vec![user.name.clone(); user.groups.len()]
)
.execute(&mut *tx)
.await?;
}
}
let users = users_with_groups
.iter()
.map(|user| user.name.clone())
.collect::<Vec<_>>();
query!(
r#"
DELETE FROM glyph_users
WHERE name <> ALL($1)
"#,
&users
)
.execute(&mut *tx)
.await?;
let groups = users_with_groups
.iter()
.flat_map(|user| user.groups.iter().cloned())
.collect::<HashSet<_>>()
.into_iter()
.collect::<Vec<_>>();
query!(
r#"
DELETE FROM glyph_groups
WHERE name <> ALL($1)
"#,
&groups
)
.execute(&mut *tx)
.await?;
tx.commit().await?;
Ok(())
}
}

View File

@@ -35,7 +35,7 @@ pub async fn get_all(
_: auth::User,
extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
let groups_with_users = models::groups::GroupWithUsers::select(&pg_pool)
let groups_with_users = models::groups::GroupWithUsers::select_all(&pg_pool)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -52,7 +52,7 @@ pub async fn get(
extract::Path(name): extract::Path<NonEmptyString>,
extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
let group_with_users = models::groups::GroupWithUsers::select_by_name(&pg_pool, name.as_str())
let group_with_users = models::groups::GroupWithUsers::select(&pg_pool, name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
@@ -71,7 +71,7 @@ pub async fn create(
extract::State(pg_pool): extract::State<PgPool>,
extract::Json(group_create): extract::Json<GroupCreate>,
) -> Result<impl IntoResponse, StatusCode> {
if models::groups::Group::select_by_name(&pg_pool, group_create.name.as_str())
if models::groups::Group::select(&pg_pool, group_create.name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.is_some()
@@ -85,7 +85,7 @@ pub async fn create(
.map(|u| u.to_string())
.collect::<Vec<_>>();
if !models::users::User::all_exist_by_names(&pg_pool, &users)
if !models::users::User::all_exist(&pg_pool, &users)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
@@ -116,7 +116,7 @@ pub async fn update(
extract::State(config): extract::State<Config>,
extract::Json(group_update): extract::Json<GroupUpdate>,
) -> Result<impl IntoResponse, StatusCode> {
let group = models::groups::Group::select_by_name(&pg_pool, name.as_str())
let group = models::groups::Group::select(&pg_pool, name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
@@ -126,7 +126,7 @@ pub async fn update(
if let Some(users) = &group_update.users {
let users = users.iter().map(ToString::to_string).collect::<Vec<_>>();
if !models::users::User::all_exist_by_names(&pg_pool, &users)
if !models::users::User::all_exist(&pg_pool, &users)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
@@ -163,12 +163,12 @@ pub async fn delete(
return Err(StatusCode::FORBIDDEN);
}
let group = models::groups::Group::select_by_name(&pg_pool, &name)
let group = models::groups::Group::select(&pg_pool, &name)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
Group::delete_by_name(&pg_pool, &group.name)
Group::delete(&pg_pool, &group.name)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

View File

@@ -21,7 +21,7 @@ struct UserResponse {
display_name: String,
email: String,
disabled: bool,
image: Option<String>,
picture: Option<String>,
groups: Vec<String>,
}
@@ -31,7 +31,7 @@ impl From<models::users::UserWithGroups> for UserResponse {
display_name: user.display_name,
email: user.email,
disabled: user.disabled,
image: user.image,
picture: user.picture,
groups: user.groups,
}
}
@@ -43,7 +43,7 @@ pub async fn get_all(
_: auth::User,
extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
let users_with_groups = models::users::UserWithGroups::select(&pg_pool)
let users_with_groups = models::users::UserWithGroups::select_all(&pg_pool)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -60,7 +60,7 @@ pub async fn get(
extract::Path(name): extract::Path<NonEmptyString>,
extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
let user_with_groups = models::users::UserWithGroups::select_by_name(&pg_pool, name.as_str())
let user_with_groups = models::users::UserWithGroups::select(&pg_pool, name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
@@ -74,7 +74,7 @@ pub struct UserCreate {
displayname: NonEmptyString,
email: NonEmptyString,
disabled: bool,
image: Option<NonEmptyString>,
picture: Option<NonEmptyString>,
groups: Vec<NonEmptyString>,
}
@@ -83,7 +83,7 @@ pub async fn create(
extract::State(pg_pool): extract::State<PgPool>,
extract::Json(user_create): extract::Json<UserCreate>,
) -> Result<impl IntoResponse, StatusCode> {
if models::users::User::select_by_name(&pg_pool, user_create.name.as_str())
if models::users::User::select(&pg_pool, user_create.name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.is_some()
@@ -97,7 +97,7 @@ pub async fn create(
.map(|g| g.to_string())
.collect::<Vec<_>>();
if !models::groups::Group::all_exist_by_names(&pg_pool, &groups)
if !models::groups::Group::all_exist(&pg_pool, &groups)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
@@ -110,7 +110,7 @@ pub async fn create(
password: generate_random_password_hash(),
email: user_create.email.to_string(),
disabled: user_create.disabled,
image: user_create.image.map(|i| i.to_string()),
picture: user_create.picture.map(|i| i.to_string()),
groups,
};
@@ -126,7 +126,7 @@ pub struct UserUpdate {
display_name: Option<NonEmptyString>,
email: Option<NonEmptyString>,
disabled: Option<bool>,
image: Option<NonEmptyString>,
picture: Option<NonEmptyString>,
groups: Option<Vec<NonEmptyString>>,
}
@@ -137,7 +137,7 @@ pub async fn update(
extract::State(config): extract::State<Config>,
extract::Json(user_update): extract::Json<UserUpdate>,
) -> Result<impl IntoResponse, StatusCode> {
let user = models::users::User::select_by_name(&pg_pool, name.as_str())
let user = models::users::User::select(&pg_pool, name.as_str())
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
@@ -150,7 +150,7 @@ pub async fn update(
.map(|g| g.to_string())
.collect::<Vec<_>>();
if !models::groups::Group::all_exist_by_names(&pg_pool, &groups)
if !models::groups::Group::all_exist(&pg_pool, &groups)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
{
@@ -183,7 +183,7 @@ pub async fn update(
.map(|e| e.to_string())
.unwrap_or(user.email),
disabled: user_update.disabled.unwrap_or(user.disabled),
image: user_update.image.map(|i| i.to_string()).or(user.image),
picture: user_update.picture.map(|i| i.to_string()).or(user.picture),
};
models::users::User::upsert(&pg_pool, &user)
@@ -206,12 +206,12 @@ pub async fn delete(
return Err(StatusCode::FORBIDDEN);
}
let user = models::users::User::select_by_name(&pg_pool, &name)
let user = models::users::User::select(&pg_pool, &name)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
.ok_or(StatusCode::NOT_FOUND)?;
models::users::User::delete_by_name(&pg_pool, &user.name)
models::users::User::delete(&pg_pool, &user.name)
.await
.or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

View File

@@ -1,3 +1,5 @@
use std::{sync::Arc, time::Duration};
use async_redis_session::RedisSessionStore;
use axum::extract::FromRef;
use openidconnect::{
@@ -10,11 +12,14 @@ use openidconnect::{
},
reqwest,
};
use redis::{self, AsyncCommands};
use sqlx::{PgPool, postgres::PgPoolOptions};
use tokio::spawn;
use tokio::{process::Command, spawn, task::JoinHandle, time::sleep};
use crate::config::Config;
use crate::{
config::{Args, Config},
fuse::AutheliaFS,
models,
};
pub type OAuthClient<
HasAuthUrl = EndpointSet,
@@ -46,26 +51,44 @@ pub type OAuthClient<
#[derive(Clone)]
pub struct State {
pub config: Config,
pub oauth_http_client: reqwest::Client,
pub oauth_client: OAuthClient,
pub pg_pool: PgPool,
pub redis_client: redis::aio::MultiplexedConnection,
pub filesystem: AutheliaFS,
pub mount: Arc<JoinHandle<()>>,
pub authelia: Arc<JoinHandle<()>>,
pub oauth_http_client: reqwest::Client,
pub oauth_client: OAuthClient,
pub session_store: RedisSessionStore,
}
impl State {
pub async fn from_config(config: Config) -> Self {
let (oauth_http_client, oauth_client) = oauth_client(&config).await;
pub async fn from_args(args: Args) -> Self {
let config = Config::try_from(&args.config).unwrap();
let pg_pool = pg_pool(&config).await;
sqlx::migrate!("./migrations").run(&pg_pool).await.unwrap();
config.admin.upsert(&pg_pool).await.unwrap();
let redis_client = redis_client(&config).await;
let (filesystem, mount) = fuse(&config, &pg_pool).await;
let contents = models::authelia::Users::to_fuse(&pg_pool).await.unwrap();
filesystem.store(contents).await.unwrap();
let authelia = authelia(args.passthrough);
let (oauth_http_client, oauth_client) = oauth_client(&config).await;
let session_store = session_store(&config);
Self {
config,
oauth_http_client,
oauth_client,
pg_pool,
redis_client,
filesystem,
mount,
authelia,
oauth_http_client,
oauth_client,
session_store,
}
}
@@ -77,18 +100,6 @@ impl FromRef<State> for Config {
}
}
impl FromRef<State> for reqwest::Client {
fn from_ref(state: &State) -> Self {
state.oauth_http_client.clone()
}
}
impl FromRef<State> for OAuthClient {
fn from_ref(state: &State) -> Self {
state.oauth_client.clone()
}
}
impl FromRef<State> for PgPool {
fn from_ref(state: &State) -> Self {
state.pg_pool.clone()
@@ -101,42 +112,30 @@ impl FromRef<State> for redis::aio::MultiplexedConnection {
}
}
impl FromRef<State> for AutheliaFS {
fn from_ref(state: &State) -> Self {
state.filesystem.clone()
}
}
impl FromRef<State> for reqwest::Client {
fn from_ref(state: &State) -> Self {
state.oauth_http_client.clone()
}
}
impl FromRef<State> for OAuthClient {
fn from_ref(state: &State) -> Self {
state.oauth_client.clone()
}
}
impl FromRef<State> for RedisSessionStore {
fn from_ref(state: &State) -> Self {
state.session_store.clone()
}
}
async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) {
let oauth_http_client = reqwest::ClientBuilder::new()
.redirect(reqwest::redirect::Policy::none())
.danger_accept_invalid_certs(config.oauth.insecure)
.build()
.unwrap();
let provider_metadata = CoreProviderMetadata::discover_async(
IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(),
&oauth_http_client,
)
.await
.unwrap();
let oauth_client = OAuthClient::from_provider_metadata(
provider_metadata,
ClientId::new(config.oauth.client_id.clone()),
Some(ClientSecret::new(config.oauth.client_secret.clone())),
)
.set_redirect_uri(
RedirectUrl::new(format!(
"{}{}/api/auth/callback",
config.server.host, config.server.subpath
))
.unwrap(),
);
(oauth_http_client, oauth_client)
}
async fn pg_pool(config: &Config) -> PgPool {
PgPoolOptions::new()
.max_connections(5)
@@ -159,43 +158,86 @@ async fn redis_client(config: &Config) -> redis::aio::MultiplexedConnection {
);
let client = redis::Client::open(url).unwrap();
let mut connection = client.get_multiplexed_async_connection().await.unwrap();
client.get_multiplexed_async_connection().await.unwrap()
}
let _: () = redis::cmd("CONFIG")
.arg("SET")
.arg("notify-keyspace-events")
.arg("Ex")
.query_async(&mut connection)
.await
async fn fuse(config: &Config, pg_pool: &PgPool) -> (AutheliaFS, Arc<JoinHandle<()>>) {
let fs = AutheliaFS::new(
config.fuse.clone(),
Some(Box::new(models::authelia::Users::from_fuse)),
pg_pool.clone(),
)
.await;
let fs_clone = fs.clone();
let mount = Arc::new(spawn(async move {
loop {
let _ = fs_clone.clone().run().await;
}
}));
(fs, mount)
}
fn authelia(args: Vec<String>) -> Arc<JoinHandle<()>> {
Arc::new(spawn(async move {
loop {
let _ = Command::new("authelia")
.args(args.clone())
.spawn()
.unwrap()
.wait()
.await;
}
}))
}
async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) {
let oauth_http_client = reqwest::ClientBuilder::new()
.redirect(reqwest::redirect::Policy::none())
.danger_accept_invalid_certs(config.oauth.insecure)
.build()
.unwrap();
let database = config.redis.database.to_string();
spawn(async move {
let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
let rconfig = redis::AsyncConnectionConfig::new().set_push_sender(tx);
let mut connection = client
.get_multiplexed_async_connection_with_config(&rconfig)
.await
.unwrap();
let mut provider_metadata = None;
let channel = format!("__keyevent@{database}__:expired");
connection.subscribe(&[channel]).await.unwrap();
let retries = 10;
let mut backoff = Duration::from_secs(1);
while let Some(msg) = rx.recv().await {
if let Some(msg) = redis::Msg::from_push_info(msg) {
if let Ok(key) = msg.get_payload::<String>() {
if !key.starts_with("invite:") {
continue;
}
let id = key.trim_start_matches("invite:").to_string();
let _: i64 = connection.srem("invite:all", id).await.unwrap();
}
}
for i in 0..retries {
if let Ok(metadata) = CoreProviderMetadata::discover_async(
IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(),
&oauth_http_client,
)
.await
{
provider_metadata = Some(metadata);
break;
}
if i == retries - 1 {
break;
}
});
connection
sleep(backoff).await;
backoff *= 2;
}
let provider_metadata = provider_metadata.unwrap();
let oauth_client = OAuthClient::from_provider_metadata(
provider_metadata,
ClientId::new(config.oauth.client_id.clone()),
Some(ClientSecret::new(config.oauth.client_secret.clone())),
)
.set_redirect_uri(
RedirectUrl::new(format!(
"{}{}/api/auth/callback",
config.server.host, config.server.subpath
))
.unwrap(),
);
(oauth_http_client, oauth_client)
}
fn session_store(config: &Config) -> RedisSessionStore {

View File

@@ -33,3 +33,20 @@ pub fn generate_random_password_hash() -> String {
password_hash
}
pub fn hash_password(password: &str) -> String {
let salt = SaltString::generate(&mut OsRng);
let argon2 = Argon2::new(
argon2::Algorithm::Argon2id,
argon2::Version::V0x13,
argon2::Params::new(65536, 3, 4, Some(32)).unwrap(),
);
let password_hash = argon2
.hash_password(password.as_bytes(), &salt)
.unwrap()
.to_string();
password_hash
}

View File

@@ -1 +1,21 @@
use tokio::{select, signal};
pub mod crypto;
pub async fn shutdown_signal() {
let ctrl_c = async {
signal::ctrl_c().await.unwrap();
};
let terminate = async {
signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
.unwrap()
.recv()
.await;
};
select! {
() = ctrl_c => {},
() = terminate => {},
}
}

View File

@@ -2,7 +2,7 @@ FROM docker.io/library/rust AS builder
ARG BUILD_MODE=debug
RUN apt-get update && apt-get clean
RUN apt-get update && apt-get install -y fuse3 libfuse3-dev && apt-get clean
WORKDIR /app
@@ -18,11 +18,11 @@ COPY .sqlx ./.sqlx
RUN cargo build $(if [ "$BUILD_MODE" = "release" ]; then echo "--release"; else echo ""; fi)
RUN mkdir -p build && cp target/$(if [ "$BUILD_MODE" = "release" ]; then echo "release"; else echo "debug"; fi)/glyph build/glyph
FROM docker.io/library/debian:bookworm-slim
FROM docker.io/authelia/authelia
COPY --from=builder /app/build/glyph /usr/local/bin/glyph
COPY --from=builder /app/build/glyph /usr/bin/glyph
COPY --from=builder /usr/lib/x86_64-linux-gnu/libfuse3.so.3 /usr/lib/x86_64-linux-gnu/libfuse3.so.3
COPY --from=builder /usr/lib/x86_64-linux-gnu/libgcc_s.so.1 /usr/lib/x86_64-linux-gnu/libgcc_s.so.1
EXPOSE 8080/tcp
ENTRYPOINT ["/usr/local/bin/glyph"]
ENTRYPOINT ["/usr/bin/glyph"]
CMD ["--help"]

View File

@@ -6,11 +6,21 @@ spec:
containers:
- name: glyph
image: registry.karaolidis.com/karaolidis/glyph:latest
securityContext:
privileged: true
capabilities:
add:
- SYS_ADMIN
resources:
limits:
podman.io/device=/dev/fuse: 1
volumeMounts:
- name: glyph-config
mountPath: /etc/glyph
- name: authelia-users
mountPath: /etc/authelia/users
- name: authelia-config
mountPath: /etc/authelia/config
- name: authelia-storage
mountPath: /var/lib/authelia
command:
[
"glyph",
@@ -18,6 +28,9 @@ spec:
"/etc/glyph/default.yml",
--log-config,
"/etc/glyph/log4rs.yml",
"--",
"--config",
"/etc/authelia/config/configuration.yml",
]
- name: postgresql
@@ -36,22 +49,6 @@ spec:
- name: redis
image: docker.io/library/redis:latest
- name: authelia
image: docker.io/authelia/authelia:latest
volumeMounts:
- name: authelia-config
mountPath: /etc/authelia
- name: authelia-users
mountPath: /etc/authelia/users
- name: authelia-storage
mountPath: /var/lib/authelia
command:
[
"/bin/sh",
"-c",
"cp /etc/authelia/users.yml /etc/authelia/users/users.yml && exec authelia --config /etc/authelia/configuration.yml",
]
- name: traefik
image: docker.io/library/traefik:latest
args:
@@ -72,13 +69,11 @@ spec:
- name: authelia-config
configMap:
name: authelia-config
- name: authelia-users
emptyDir: {}
- name: authelia-storage
emptyDir: {}
- name: traefik-config
configMap:
name: traefik-config
- name: authelia-storage
emptyDir: {}
---
apiVersion: v1
kind: ConfigMap
@@ -89,13 +84,6 @@ data:
server:
host: https://app.glyph.local
database:
host: postgresql
port: 5432
user: glyph
password: glyph
database: glyph
oauth:
issuer_url: https://id.glyph.local
client_id: glyph
@@ -103,13 +91,27 @@ data:
admin_group: admins
insecure: true
authelia:
user_database: /etc/authelia/users/users.yml
fuse:
mount_directory: /etc/authelia/users
user_database_name: users.yml
postgresql:
host: postgresql
port: 5432
user: glyph
password: glyph
database: glyph
redis:
host: redis
port: 6379
admin:
name: glyph
display_name: Glyph
password: glyph
email: glyph@karaolidis.com
log4rs.yml: |
appenders:
stdout:
@@ -198,13 +200,6 @@ data:
redirect_uris:
- "https://app.glyph.local/api/auth/callback"
authorization_policy: "one_factor"
users.yml: |
users:
glyph:
displayname: "glyph"
password: "$argon2id$v=19$m=65536,t=3,p=4$lobLBhv2SKyVZZZCl+e8Lg$VzPmcTksXBNlJfeztMUqMDgdU47qT5bB1Gk+QHigASQ" # The digest of 'glyph'.
groups:
- "admins"
---
apiVersion: v1
kind: ConfigMap
@@ -232,7 +227,7 @@ data:
authelia-service:
loadBalancer:
servers:
- url: "http://authelia:9091"
- url: "http://glyph:9091"
glyph-service:
loadBalancer: