Add fuse callbacks
Signed-off-by: Nikolaos Karaolidis <nick@karaolidis.com>
15 .sqlx/query-019256af8ccf4fc3f1ad6daa0ed3bc945f141020837732c9cf680fbcc438c6a8.json generated Normal file
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray",
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "019256af8ccf4fc3f1ad6daa0ed3bc945f141020837732c9cf680fbcc438c6a8"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n GROUP BY g.name\n ",
  "query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n ",
  "describe": {
    "columns": [
      {
@@ -22,5 +22,5 @@
      null
    ]
  },
  "hash": "e52660da218cabe80565d95bf77add43558dc3a99c29246cf61d2431ddf34cf8"
  "hash": "1493410c6cf4f7a4cadadf5321f42ad265282d3072eded0b9caaa3dc81ab8b45"
}
15 .sqlx/query-1bf35e562ef97408038259aca7b36f719b5f5697efbce538bf3f4eefec6b8d16.json generated Normal file
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray",
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "1bf35e562ef97408038259aca7b36f719b5f5697efbce538bf3f4eefec6b8d16"
}
14 .sqlx/query-244c0ca382a0bd8040667c05b457d2b55f15670d696faba7d57f42090b040378.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_groups\n WHERE name <> ALL($1)\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "244c0ca382a0bd8040667c05b457d2b55f15670d696faba7d57f42090b040378"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM groups\n WHERE name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "275592cdd00626bcb0c5c3054952b6cd170d0692354100d0a1c25c2dba9e9e6b"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM users\n WHERE name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "282189b1fc3f70e5c2de3f19a3cc8b1fe7e32e4b9b501674ea138acf0cd759ff"
}
19 .sqlx/query-3613d8c9b991fb1d39ad99ed36778e5ba9933ca3cf0f064ae5f139cee1cdad42.json generated Normal file
@@ -0,0 +1,19 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "3613d8c9b991fb1d39ad99ed36778e5ba9933ca3cf0f064ae5f139cee1cdad42"
}
19 .sqlx/query-364208fb0f4ef56a2ea755481a0d994fed588ff998524521936306e772ae8dce.json generated Normal file
@@ -0,0 +1,19 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n picture = EXCLUDED.picture\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "364208fb0f4ef56a2ea755481a0d994fed588ff998524521936306e772ae8dce"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM users\n WHERE name = ANY($1)\n ",
  "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_groups\n WHERE name = ANY($1)\n ",
  "describe": {
    "columns": [
      {
@@ -18,5 +18,5 @@
      null
    ]
  },
  "hash": "090673660f991b66b0b5a7e2492e94011405a313f89943cff7e64e3ccc674822"
  "hash": "3a6bd1951cac5c82e67fa3610aa90984810cb3e5d596ec0f864ae5aa2631816a"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT name, display_name, password, email, disabled, image\n FROM users\n WHERE name = $1\n ",
  "query": "\n SELECT name, display_name, password, email, disabled, picture\n FROM glyph_users\n WHERE name = $1\n ",
  "describe": {
    "columns": [
      {
@@ -30,7 +30,7 @@
      },
      {
        "ordinal": 5,
        "name": "image",
        "name": "picture",
        "type_info": "Text"
      }
    ],
@@ -48,5 +48,5 @@
      true
    ]
  },
  "hash": "74d4ef98ee975bfe90418171dea43397316f8d57ac4d9b09248bb5b0f767b166"
  "hash": "538bac60e4dff5453e9d69f8b94dd623844f4ab89b2963c625f6c47e2dca9c02"
}
14 .sqlx/query-5d457b24e090dd2d3be512de08706387e62e4ec7997f60c3958572cce8985c27.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "5d457b24e090dd2d3be512de08706387e62e4ec7997f60c3958572cce8985c27"
}
@@ -1,19 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled,\n image = EXCLUDED.image\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "5dbde6bba584448a7be9fd6965aec52a8050d21c453d7ec221be44bd0d893fd1"
}
15 .sqlx/query-6830e994b974d76fdcf51cf0e540ce0fa79b58f8eaf0c7ecb1a22e6fc1ebf505.json generated Normal file
@@ -0,0 +1,15 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray",
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "6830e994b974d76fdcf51cf0e540ce0fa79b58f8eaf0c7ecb1a22e6fc1ebf505"
}
14 .sqlx/query-6a626592b97a77a5804125753527fc9451fa39565b363d2e54ee06b2f36b177f.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_groups\n WHERE name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "6a626592b97a77a5804125753527fc9451fa39565b363d2e54ee06b2f36b177f"
}
14 .sqlx/query-8f1394702f150d3642129fcca8417e5e01911c13191be44339dc36a6c4978db2.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_users\n WHERE name <> ALL($1)\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "8f1394702f150d3642129fcca8417e5e01911c13191be44339dc36a6c4978db2"
}
@@ -1,15 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO users_groups (user_name, group_name)\n SELECT * FROM UNNEST($1::text[], $2::text[])\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "TextArray",
        "TextArray"
      ]
    },
    "nullable": []
  },
  "hash": "91b332e6af78793ae53cfdbf8e5edccfe031a21ad1ca8240024adb7e0006570b"
}
14 .sqlx/query-a15ceb9e5c596ce9639a79436e9e642c65c2cac61054b2fdaa7190c0e8acf0d0.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_users_groups\n WHERE user_name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "a15ceb9e5c596ce9639a79436e9e642c65c2cac61054b2fdaa7190c0e8acf0d0"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM users_groups\n WHERE group_name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "adb2455e26b1cddf90a54d08e79f57258db1212ef4120868581cd0a8a81eff8f"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "INSERT INTO groups (name) VALUES ($1)",
  "query": "INSERT INTO glyph_groups (name) VALUES ($1)",
  "describe": {
    "columns": [],
    "parameters": {
@@ -10,5 +10,5 @@
    },
    "nullable": []
  },
  "hash": "b1be2a377b5bfaf093618d049c0ed8b759f946580870558c699cce9490a0e0f2"
  "hash": "af519bc617842e3d48a976f9aa4b107b2690b0a259ee02b133985e3eb00a4165"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n WHERE u.name = $1\n GROUP BY u.name, u.email, u.disabled, u.image\n ",
  "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n WHERE u.name = $1\n ",
  "describe": {
    "columns": [
      {
@@ -30,7 +30,7 @@
      },
      {
        "ordinal": 5,
        "name": "image",
        "name": "picture",
        "type_info": "Text"
      },
      {
@@ -54,5 +54,5 @@
      null
    ]
  },
  "hash": "9313aac97fa5191c47874e2e3834ca713d3a3b5556ac26c3cc51ee138f411982"
  "hash": "b0b3c6daf78b7a04e75b781d997b61a56fbc8005fe1e419febb87b5247f44ade"
}
@@ -1,19 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "INSERT INTO users (name, display_name, password, email, disabled, image)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "ba1cb3d9ffd5dd2260815616abc0b93cd67767cf299f443023d8ab9f9a12c44c"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.image,\n COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS \"groups!\"\n FROM users u\n LEFT JOIN users_groups ug ON u.name = ug.user_name\n GROUP BY u.name, u.email, u.disabled, u.image\n ",
  "query": "\n SELECT\n u.name,\n u.display_name,\n u.password,\n u.email,\n u.disabled,\n u.picture,\n ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS \"groups!\"\n FROM glyph_users u\n ",
  "describe": {
    "columns": [
      {
@@ -30,7 +30,7 @@
      },
      {
        "ordinal": 5,
        "name": "image",
        "name": "picture",
        "type_info": "Text"
      },
      {
@@ -52,5 +52,5 @@
      null
    ]
  },
  "hash": "95bbd23a12bf44b1bc31859a1fd324c16d76ec2797f68da75fc6e526a3cd0bc4"
  "hash": "bc5847efd81251c0e4b524b84f2485ebbd0cd0a813d364815858add87d0e07a2"
}
14 .sqlx/query-cd8831c93f8714f5242bf0b3dae6240a46b37d5d163414bd739fa0025b6de0a5.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_users_groups\n WHERE group_name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "cd8831c93f8714f5242bf0b3dae6240a46b37d5d163414bd739fa0025b6de0a5"
}
19 .sqlx/query-ce1fade2aaf62ce5a3b1448d92517d73533dd9b098d4dfb679a0e77e4a42b3e6.json generated Normal file
@@ -0,0 +1,19 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool",
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "ce1fade2aaf62ce5a3b1448d92517d73533dd9b098d4dfb679a0e77e4a42b3e6"
}
18 .sqlx/query-e1f9c5d85717bdf150dd2352196d7477db1f00a8776777702227a3a6ef8a8c4a.json generated Normal file
@@ -0,0 +1,18 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO glyph_users (name, display_name, password, email, disabled)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (name) DO UPDATE\n SET display_name = EXCLUDED.display_name,\n password = EXCLUDED.password,\n email = EXCLUDED.email,\n disabled = EXCLUDED.disabled\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text",
        "Text",
        "Text",
        "Text",
        "Bool"
      ]
    },
    "nullable": []
  },
  "hash": "e1f9c5d85717bdf150dd2352196d7477db1f00a8776777702227a3a6ef8a8c4a"
}
14 .sqlx/query-e355e946faf174f24ffb3bdb4cf3e6f3047431316b1e6ef752e2c33a0e0a0c07.json generated Normal file
@@ -0,0 +1,14 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM glyph_users\n WHERE name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "e355e946faf174f24ffb3bdb4cf3e6f3047431316b1e6ef752e2c33a0e0a0c07"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM groups\n WHERE name = ANY($1)\n ",
  "query": "\n SELECT COUNT(*) AS \"count!\"\n FROM glyph_users\n WHERE name = ANY($1)\n ",
  "describe": {
    "columns": [
      {
@@ -18,5 +18,5 @@
      null
    ]
  },
  "hash": "9caa0dac7d2a5098a09278e2331e86d87b1e4a6916836ca0d1a0509a159affc8"
  "hash": "e53bf6042de37af5560c1861dc4056827cf4ea87449f209dac15ba5d6bfc1704"
}
@@ -1,14 +0,0 @@
{
  "db_name": "PostgreSQL",
  "query": "\n DELETE FROM users_groups\n WHERE user_name = $1\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Text"
      ]
    },
    "nullable": []
  },
  "hash": "e7258b575bc6d1d71f9c62a9c6b56f6103ab7caebc26886346e4ecec399bd86c"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT name\n FROM groups\n WHERE name = $1\n ",
  "query": "\n SELECT name\n FROM glyph_groups\n WHERE name = $1\n ",
  "describe": {
    "columns": [
      {
@@ -18,5 +18,5 @@
      false
    ]
  },
  "hash": "19d85e2094bcb4ac818975b9477f4cc3de4128ef0aa3383369092f2df56636d9"
  "hash": "ee2e54ee09eb411a441931e5f9070f9216093191fb46f25cb0b237ef95c92c5d"
}
@@ -1,6 +1,6 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n g.name,\n COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS \"users!\"\n FROM groups g\n LEFT JOIN users_groups ug ON g.name = ug.group_name\n WHERE g.name = $1\n GROUP BY g.name\n ",
  "query": "\n SELECT\n g.name,\n ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS \"users!\"\n FROM glyph_groups g\n WHERE g.name = $1\n ",
  "describe": {
    "columns": [
      {
@@ -24,5 +24,5 @@
      null
    ]
  },
  "hash": "52bcae42b069a7665baeff903774e624f3e7ae6e2474d03c8619fa1816edefe0"
  "hash": "f769fe0d94fe52430004b327c1afe839d61eed8b53836bf4f091c9c56fa1cf17"
}
1 Cargo.lock generated
@@ -1260,7 +1260,6 @@ dependencies = [
 "redis 0.31.0",
 "redis-macros",
 "serde",
 "serde_json",
 "serde_yaml",
 "sqlx",
 "time",
@@ -20,7 +20,7 @@ async-session = "3.0.0"
axum = { version = "0.8.4", features = ["macros"] }
axum-extra = { version = "0.10.1", features = ["typed-header"] }
clap = { version = "4.5.39", features = ["derive"] }
fuser = "0.15.1"
fuser = { version = "0.15.1", features = ["abi-7-31"] }
libc = "0.2.172"
log = "0.4.27"
log4rs = "1.3.0"
@@ -31,9 +31,8 @@ passwords = "3.1.16"
redis = { version = "0.31.0", features = ["tokio-comp"] }
redis-macros = "0.5.4"
serde = "1.0.219"
serde_json = "1.0.140"
serde_yaml = "0.9.34"
sqlx = { version = "0.8.6", features = ["runtime-tokio", "postgres", "time", "uuid"] }
time = { version = "0.3.41", features = ["serde"] }
tokio = { version = "1.45.1", features = ["rt-multi-thread", "process"] }
tokio = { version = "1.45.1", features = ["rt-multi-thread", "process", "signal"] }
uuid = { version = "1.17.0", features = ["serde"] }
@@ -1,29 +1,29 @@
CREATE TABLE IF NOT EXISTS users (
CREATE TABLE IF NOT EXISTS glyph_users (
    name TEXT PRIMARY KEY,
    display_name TEXT NOT NULL,
    password TEXT NOT NULL,
    email TEXT NOT NULL UNIQUE,
    disabled BOOLEAN NOT NULL,
    image TEXT,
    picture TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE TABLE IF NOT EXISTS groups (
CREATE TABLE IF NOT EXISTS glyph_groups (
    name TEXT PRIMARY KEY,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE TABLE IF NOT EXISTS users_groups (
CREATE TABLE IF NOT EXISTS glyph_users_groups (
    user_name TEXT NOT NULL,
    group_name TEXT NOT NULL,
    PRIMARY KEY (user_name, group_name),
    FOREIGN KEY (user_name) REFERENCES users(name) ON DELETE CASCADE,
    FOREIGN KEY (group_name) REFERENCES groups(name) ON DELETE CASCADE
    FOREIGN KEY (user_name) REFERENCES glyph_users(name) ON DELETE CASCADE,
    FOREIGN KEY (group_name) REFERENCES glyph_groups(name) ON DELETE CASCADE
);

CREATE OR REPLACE FUNCTION update_timestamp()
CREATE OR REPLACE FUNCTION glyph_update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
@@ -31,26 +31,26 @@ BEGIN
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER update_users_timestamp
BEFORE UPDATE ON users
CREATE OR REPLACE TRIGGER glyph_update_users_timestamp
BEFORE UPDATE ON glyph_users
FOR EACH ROW
EXECUTE FUNCTION update_timestamp();
EXECUTE FUNCTION glyph_update_timestamp();

CREATE TRIGGER update_groups_timestamp
BEFORE UPDATE ON groups
CREATE OR REPLACE TRIGGER glyph_update_groups_timestamp
BEFORE UPDATE ON glyph_groups
FOR EACH ROW
EXECUTE FUNCTION update_timestamp();
EXECUTE FUNCTION glyph_update_timestamp();

CREATE OR REPLACE FUNCTION update_users_groups_timestamp()
CREATE OR REPLACE FUNCTION glyph_update_users_groups_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    UPDATE users SET updated_at = NOW() WHERE name = NEW.user_name;
    UPDATE groups SET updated_at = NOW() WHERE name = NEW.group_name;
    UPDATE glyph_users SET updated_at = NOW() WHERE name = NEW.user_name;
    UPDATE glyph_groups SET updated_at = NOW() WHERE name = NEW.group_name;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER update_users_groups_timestamp
AFTER INSERT OR DELETE ON users_groups
CREATE OR REPLACE TRIGGER glyph_update_users_groups_timestamp
AFTER INSERT OR DELETE ON glyph_users_groups
FOR EACH ROW
EXECUTE FUNCTION update_users_groups_timestamp();
EXECUTE FUNCTION glyph_update_users_groups_timestamp();
@@ -1,5 +1,6 @@
use clap::Parser;
use serde::Deserialize;
use sqlx::query;
use std::{
    error::Error,
    fs,
@@ -7,6 +8,8 @@ use std::{
    path::PathBuf,
};

use crate::utils::crypto::hash_password;

#[derive(Clone, Deserialize)]
pub struct ServerConfig {
    pub host: String,
@@ -59,6 +62,40 @@ pub struct RedisConfig {
    pub database: u8,
}

#[derive(Clone, Deserialize)]
pub struct AdminConfig {
    pub name: String,
    pub display_name: String,
    pub password: String,
    pub email: String,
}

impl AdminConfig {
    pub async fn upsert(&self, pool: &sqlx::PgPool) -> Result<(), Box<dyn Error + Send + Sync>> {
        let password = hash_password(&self.password);
        query!(
            r#"
            INSERT INTO glyph_users (name, display_name, password, email, disabled)
            VALUES ($1, $2, $3, $4, $5)
            ON CONFLICT (name) DO UPDATE
            SET display_name = EXCLUDED.display_name,
                password = EXCLUDED.password,
                email = EXCLUDED.email,
                disabled = EXCLUDED.disabled
            "#,
            self.name,
            self.display_name,
            password,
            self.email,
            false
        )
        .execute(pool)
        .await?;

        Ok(())
    }
}

#[derive(Clone, Deserialize)]
pub struct Config {
    pub server: ServerConfig,
@@ -66,6 +103,7 @@ pub struct Config {
    pub fuse: FuseConfig,
    pub postgresql: PostgresqlConfig,
    pub redis: RedisConfig,
    pub admin: AdminConfig,
}

impl TryFrom<&PathBuf> for Config {
@@ -87,4 +125,7 @@ pub struct Args {
    /// Path to the log4rs config file
    #[arg(short, long, value_name = "FILE", default_value = "log4rs.yaml")]
    pub log_config: PathBuf,
    /// Additional arguments to pass to Authelia
    #[arg(last = true, num_args = 0.., allow_hyphen_values = true)]
    pub passthrough: Vec<String>,
}
@@ -4,6 +4,7 @@
use std::{
    cmp,
    collections::HashMap,
    error::Error,
    ffi::CString,
    mem::MaybeUninit,
    ops::Deref,
@@ -11,17 +12,20 @@ use std::{
    time::{Duration, SystemTime},
};

use fuser::{FileType, Filesystem};
use fuser::{FileType, Filesystem, Notifier, Session};
use libc::{
    EACCES, EINVAL, EISDIR, ENOENT, ENOSYS, ENOTDIR, EPERM, O_ACCMODE, O_APPEND, O_RDONLY, O_TRUNC,
    O_WRONLY, R_OK, W_OK, X_OK, c_int, gid_t, uid_t,
};
use parking_lot::{RwLock, RwLockWriteGuard};
use sqlx::PgPool;
use tokio::{fs, task::spawn_blocking};

use crate::config::FuseConfig;

type WriteCallback = Box<dyn Fn(&str) + Send + Sync>;
type WriteCallback = Box<dyn Fn(&PgPool, &str) + Send + Sync>;

#[derive(Clone, Copy)]
struct StaticState {
    creation_time: SystemTime,
    user: u32,
@@ -29,12 +33,14 @@ struct StaticState {
    block_size: u32,
}

#[derive(Clone)]
struct VariableState {
    contents: String,
    access_time: SystemTime,
    modification_time: SystemTime,
}

#[derive(Clone, Copy)]
struct Handle {
    inode: u64,
    uid: u32,
@@ -42,6 +48,7 @@ struct Handle {
    cursor: i64,
}

#[derive(Clone)]
struct Handles {
    handles: HashMap<u64, Handle>,
    next_handle: u64,
@@ -55,12 +62,15 @@ impl Handles {
    }
}

#[derive(Clone)]
pub struct AutheliaFS {
    config: FuseConfig,
    write_callback: Option<WriteCallback>,
    static_state: Arc<StaticState>,
    static_state: StaticState,
    variable_state: Arc<RwLock<VariableState>>,
    handles: Arc<RwLock<Handles>>,
    write_callback: Arc<RwLock<WriteCallback>>,
    notifier: Arc<RwLock<Option<Notifier>>>,
    pg_pool: PgPool,
}

const TTL: Duration = Duration::from_secs(1);
@@ -222,13 +232,19 @@ enum HandleCheckResult {
}

impl AutheliaFS {
    pub fn new(config: FuseConfig, write_callback: Option<WriteCallback>) -> Self {
    pub async fn new(
        config: FuseConfig,
        write_callback: Option<WriteCallback>,
        pg_pool: PgPool,
    ) -> Self {
        let contents = String::new();
        let time = SystemTime::now();

        let uid = getuid();
        let gid = getgid();

        let _ = fs::create_dir_all(&config.mount_directory).await;

        let block_size = u32::try_from(
            stat(config.mount_directory.to_str().unwrap())
                .unwrap()
@@ -236,14 +252,14 @@ impl AutheliaFS {
        )
        .unwrap_or(4096);

        let static_file_state = Arc::new(StaticState {
        let static_state = StaticState {
            creation_time: time,
            user: uid,
            group: gid,
            block_size,
        });
        };

        let variable_file_state = Arc::new(RwLock::new(VariableState {
        let variable_state = Arc::new(RwLock::new(VariableState {
            contents,
            access_time: time,
            modification_time: time,
@@ -254,18 +270,48 @@ impl AutheliaFS {
            next_handle: 1,
        }));

        let write_callback = Arc::new(RwLock::new(
            write_callback.unwrap_or_else(|| Box::new(|_, _| {})),
        ));

        let notifier = Arc::new(RwLock::new(None));

        Self {
            config,
            write_callback,
            variable_state: variable_file_state,
            static_state: static_file_state,
            static_state,
            variable_state,
            handles,
            write_callback,
            notifier,
            pg_pool,
        }
    }

    pub fn mount(self) -> std::io::Result<()> {
        let mountpoint = self.config.mount_directory.clone();
        fuser::mount2(self, mountpoint, &vec![])
    pub async fn run(self) -> Result<(), Box<dyn Error + Send + Sync>> {
        let _ = fs::create_dir_all(&self.config.mount_directory).await;
        let mut session = Session::new(self.clone(), self.config.mount_directory.clone(), &[])?;
        self.notifier.write().replace(session.notifier());
        Ok(spawn_blocking(move || session.run().unwrap()).await?)
    }

    pub async fn store(&self, contents: String) -> Result<(), Box<dyn Error + Send + Sync>> {
        let variable_state = self.variable_state.clone();
        let notifier = self.notifier.clone();

        Ok(spawn_blocking(move || {
            let mut variable_state = variable_state.write();

            variable_state.contents = contents;
            variable_state.modification_time = SystemTime::now();
            variable_state.access_time = SystemTime::now();

            if let Some(notifier) = notifier.write().as_ref() {
                notifier
                    .store(USERS_FILE_INODE, 0, variable_state.contents.as_bytes())
                    .unwrap();
            }
        })
        .await?)
    }

    #[allow(clippy::fn_params_excessive_bools)]
@@ -483,30 +529,36 @@ impl Filesystem for AutheliaFS {
            return;
        }

        if size.is_some() && (atime.is_some() || mtime.is_some()) {
            let mut variable_state = self.variable_state.write();

        if let Some(size) = size {
            if size == 0 {
                let mut variable_file_state = self.variable_state.write();
                variable_file_state.contents.clear();
                variable_state.contents.clear();
            } else {
                reply.error(ENOSYS);
                return;
            }
        }

        if mtime.is_some() || atime.is_some() {
            let mut variable_file_state = self.variable_state.write();

            variable_file_state.modification_time = match mtime {
            variable_state.modification_time = match mtime {
                Some(fuser::TimeOrNow::Now) => SystemTime::now(),
                Some(fuser::TimeOrNow::SpecificTime(time)) => time,
                None => variable_file_state.modification_time,
                None => variable_state.modification_time,
            };

            variable_file_state.access_time = match atime {
            variable_state.access_time = match atime {
                Some(fuser::TimeOrNow::Now) => SystemTime::now(),
                Some(fuser::TimeOrNow::SpecificTime(time)) => time,
                None => variable_file_state.access_time,
                None => variable_state.access_time,
            };

            self.notifier
                .write()
                .as_ref()
                .unwrap()
                .store(ino, 0, variable_state.contents.as_bytes())
                .unwrap();
        }

        let attr = file.to_file_attr(self);
@@ -544,8 +596,8 @@ impl Filesystem for AutheliaFS {
        drop(handles);

        if flags & O_TRUNC != 0 && flags & O_ACCMODE != O_RDONLY {
            let mut variable_file_state = self.variable_state.write();
            variable_file_state.contents.clear();
            let mut variable_state = self.variable_state.write();
            variable_state.contents.clear();
        }

        reply.opened(handle, 0);
@@ -578,11 +630,11 @@ impl Filesystem for AutheliaFS {
            AccessCheckResult::Ok(_) => {}
        }

        let mut variable_file_state = self.variable_state.write();
        variable_file_state.access_time = SystemTime::now();
        let mut variable_state = self.variable_state.write();
        variable_state.access_time = SystemTime::now();

        let variable_file_state = RwLockWriteGuard::downgrade(variable_file_state);
        let contents = variable_file_state.contents.as_bytes();
        let variable_state = RwLockWriteGuard::downgrade(variable_state);
        let contents = variable_state.contents.as_bytes();
        let contents_len = i64::try_from(contents.len()).unwrap();

        if offset < 0 || offset >= contents_len {
@@ -626,9 +678,9 @@ impl Filesystem for AutheliaFS {
        let mut handles = self.handles.write();
        let handle = handles.handles.get_mut(&fh).unwrap();

        let mut variable_file_state = self.variable_state.write();
        let mut variable_state = self.variable_state.write();

        let old_end = variable_file_state.contents.len();
        let old_end = variable_state.contents.len();

        let offset = if handle.flags & O_APPEND != 0 {
            handle.cursor = i64::try_from(old_end).unwrap();
@@ -641,8 +693,8 @@ impl Filesystem for AutheliaFS {
            usize::try_from(offset).unwrap()
        };

        variable_file_state.access_time = SystemTime::now();
        variable_file_state.modification_time = SystemTime::now();
        variable_state.access_time = SystemTime::now();
        variable_state.modification_time = SystemTime::now();

        let Ok(new_data) = std::str::from_utf8(data) else {
            reply.error(EINVAL);
@@ -653,22 +705,27 @@ impl Filesystem for AutheliaFS {
        let new_real_end = cmp::max(new_end, old_end);

        let mut new_contents = String::with_capacity(new_real_end);
        new_contents.push_str(&variable_file_state.contents[..offset]);
        new_contents.push_str(&variable_state.contents[..offset]);
        new_contents.push_str(new_data);
        if new_end < old_end {
            new_contents.push_str(&variable_file_state.contents[new_end..]);
            new_contents.push_str(&variable_state.contents[new_end..]);
        }
        variable_file_state.contents = new_contents;
        variable_state.contents = new_contents;

        handle.cursor = i64::try_from(offset + new_data.len()).unwrap();

        drop(handles);

        if let Some(callback) = &self.write_callback {
            callback(&variable_file_state.contents);
        }
        self.write_callback.read().deref()(&self.pg_pool, &variable_state.contents);

        drop(variable_file_state);
        self.notifier
            .write()
            .as_ref()
            .unwrap()
            .store(ino, 0, variable_state.contents.as_bytes())
            .unwrap();

        drop(variable_state);

        reply.written(u32::try_from(data.len()).unwrap());
    }
@@ -954,10 +1011,10 @@ impl Filesystem for AutheliaFS {
            AccessCheckResult::Ok(_) => {}
        }

        let variable_file_state = self.variable_state.read();
        let blocks = (variable_file_state.contents.len() as u64)
        let variable_state = self.variable_state.read();
        let blocks = (variable_state.contents.len() as u64)
            .div_ceil(u64::from(self.static_state.block_size));
        drop(variable_file_state);
        drop(variable_state);

        reply.statfs(
            blocks,
28 src/main.rs
@@ -2,7 +2,7 @@
#![allow(clippy::missing_docs_in_private_items)]

mod config;
mod fuser;
mod fuse;
mod models;
mod routes;
mod state;
@@ -15,28 +15,26 @@ use log4rs::config::Deserializers;
use std::net::SocketAddr;
use tokio::net::TcpListener;

use config::{Args, Config};
use config::Args;
use state::State;

#[tokio::main]
async fn main() {
    let args = Args::parse();
    log4rs::init_file(args.log_config, Deserializers::default()).unwrap();
    let args: Args = Args::parse();
    log4rs::init_file(args.log_config.clone(), Deserializers::default()).unwrap();

    let config = Config::try_from(&args.config).unwrap();
    let state = State::from_config(config.clone()).await;
    let state = State::from_args(args).await;

    sqlx::migrate!("./migrations")
        .run(&state.pg_pool)
        .await
        .unwrap();
    let routes = routes::routes(state.clone());
    let app = axum::Router::new().nest(&format!("{}/api", state.config.server.subpath), routes);

    let routes = routes::routes(state);
    let app = axum::Router::new().nest(&format!("{}/api", config.server.subpath), routes);

    let addr = SocketAddr::from((config.server.address, config.server.port));
    let addr = SocketAddr::from((state.config.server.address, state.config.server.port));
    let listener = TcpListener::bind(addr).await.unwrap();

    info!("Listening on {}", listener.local_addr().unwrap());
    serve(listener, app).await.unwrap();

    serve(listener, app)
        .with_graceful_shutdown(utils::shutdown_signal())
        .await
        .unwrap();
}
@@ -1,24 +1,102 @@
use log::warn;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_yaml::Value;
use sqlx::PgPool;

use std::collections::HashMap;
use std::{collections::HashMap, error::Error};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UsersFile {
    pub users: HashMap<String, UserFile>,
pub struct Users {
    pub users: HashMap<String, User>,

    #[serde(flatten)]
    pub extra: Option<HashMap<String, Value>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserFile {
pub struct User {
    pub displayname: String,
    pub password: String,
    pub email: Option<String>,
    pub disabled: Option<bool>,
    pub picture: Option<String>,
    pub groups: Option<Vec<String>>,

    #[serde(flatten)]
    pub extra: Option<HashMap<String, Value>>,
}

impl TryInto<Vec<super::users::UserWithGroups>> for Users {
    type Error = Box<dyn Error + Send + Sync>;

    fn try_into(self) -> Result<Vec<super::users::UserWithGroups>, Self::Error> {
        self.users
            .into_iter()
            .map(|(name, user)| {
                let groups = user.groups.unwrap_or_default();
                Ok(super::users::UserWithGroups {
                    name: name.clone(),
                    display_name: user.displayname,
                    password: user.password,
                    email: user
                        .email
                        .ok_or_else(|| format!("User {} is missing an email", &name))?,
                    disabled: user.disabled.unwrap_or(false),
                    picture: user.picture,
                    groups,
                })
            })
            .collect()
    }
}

impl Users {
    pub fn from_fuse(pool: &PgPool, contents: &str) {
        let Ok(users) = serde_yaml::from_str::<Self>(contents) else {
            warn!("Failed to parse users from JSON.");
            return;
        };

        let users_with_groups: Vec<super::users::UserWithGroups> = match users.try_into() {
            Ok(users) => users,
            Err(e) => {
                warn!("Failed to convert Users to UserWithGroups: {e}");
                return;
            }
        };

        let rt = tokio::runtime::Runtime::new().unwrap();
        rt.block_on(async {
            super::users::UserWithGroups::upsert_many_delete_remaining(pool, &users_with_groups)
                .await
                .unwrap_or_else(|e| warn!("Failed to upsert users: {e}"));
        });
    }

    pub async fn to_fuse(pool: &PgPool) -> Result<String, Box<dyn Error + Send + Sync>> {
        let users_with_groups = super::users::UserWithGroups::select_all(pool).await?;

        let users = Self {
            users: users_with_groups
                .into_iter()
                .map(|user| {
                    (
                        user.name.clone(),
                        User {
                            displayname: user.display_name,
                            password: user.password,
                            email: Some(user.email),
                            disabled: Some(user.disabled),
                            picture: user.picture,
                            groups: Some(user.groups),
                            extra: None,
                        },
                    )
                })
                .collect(),
            extra: None,
        };

        Ok(serde_yaml::to_string(&users)?)
    }
}
@@ -9,7 +9,7 @@ pub struct Group {
}

impl Group {
    pub async fn select_by_name(
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
@@ -17,7 +17,7 @@ impl Group {
            Group,
            r#"
            SELECT name
            FROM groups
            FROM glyph_groups
            WHERE name = $1
            "#,
            name
@@ -28,13 +28,10 @@ impl Group {
        Ok(group)
    }

    pub async fn delete_by_name(
        pool: &PgPool,
        name: &str,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
    pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
        query!(
            r#"
            DELETE FROM groups
            DELETE FROM glyph_groups
            WHERE name = $1
            "#,
            name
@@ -45,14 +42,14 @@ impl Group {
        Ok(())
    }

    pub async fn all_exist_by_names(
    pub async fn all_exist(
        pool: &PgPool,
        names: &[String],
    ) -> Result<bool, Box<dyn Error + Send + Sync>> {
        let row = query!(
            r#"
            SELECT COUNT(*) AS "count!"
            FROM groups
            FROM glyph_groups
            WHERE name = ANY($1)
            "#,
            names
@@ -67,20 +64,19 @@ impl Group {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GroupWithUsers {
    pub name: String,
    #[serde(default)]
    pub users: Vec<String>,
}

impl GroupWithUsers {
    pub async fn select(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
    pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
        let groups = query_as!(
            GroupWithUsers,
            r#"
            SELECT
                g.name,
                COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!"
            FROM groups g
            LEFT JOIN users_groups ug ON g.name = ug.group_name
            GROUP BY g.name
                ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
            FROM glyph_groups g
            "#
        )
        .fetch_all(pool)
@@ -89,7 +85,7 @@ impl GroupWithUsers {
        Ok(groups)
    }

    pub async fn select_by_name(
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
@@ -98,11 +94,9 @@ impl GroupWithUsers {
            r#"
            SELECT
                g.name,
                COALESCE(array_agg(ug.user_name ORDER BY ug.user_name), ARRAY[]::TEXT[]) AS "users!"
            FROM groups g
            LEFT JOIN users_groups ug ON g.name = ug.group_name
                ARRAY(SELECT ug.user_name FROM glyph_users_groups ug WHERE ug.group_name = g.name) AS "users!"
            FROM glyph_groups g
            WHERE g.name = $1
            GROUP BY g.name
            "#,
            name
        )
@@ -119,7 +113,7 @@ impl GroupWithUsers {
        let mut tx = pool.begin().await?;

        query!(
            r#"INSERT INTO groups (name) VALUES ($1)"#,
            r#"INSERT INTO glyph_groups (name) VALUES ($1)"#,
            group_with_users.name
        )
        .execute(&mut *tx)
@@ -127,7 +121,7 @@ impl GroupWithUsers {

        query!(
            r#"
            INSERT INTO users_groups (user_name, group_name)
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            &group_with_users.users,
@@ -19,7 +19,7 @@ impl UsersGroups {

        query!(
            r#"
            DELETE FROM users_groups
            DELETE FROM glyph_users_groups
            WHERE group_name = $1
            "#,
            group_name
@@ -29,7 +29,7 @@ impl UsersGroups {

        query!(
            r#"
            INSERT INTO users_groups (user_name, group_name)
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            users,
@@ -50,7 +50,7 @@ impl UsersGroups {

        query!(
            r#"
            DELETE FROM users_groups
            DELETE FROM glyph_users_groups
            WHERE user_name = $1
            "#,
            user_name
@@ -60,7 +60,7 @@ impl UsersGroups {

        query!(
            r#"
            INSERT INTO users_groups (user_name, group_name)
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            &vec![user_name.to_string(); groups.len()],
@@ -1,4 +1,4 @@
use std::error::Error;
use std::{collections::HashSet, error::Error};

use serde::{Deserialize, Serialize};
use sqlx::{FromRow, PgPool, query, query_as};
@@ -12,19 +12,19 @@ pub struct User {
    #[serde(default)]
    pub disabled: bool,
    #[serde(default)]
    pub image: Option<String>,
    pub picture: Option<String>,
}

impl User {
    pub async fn select_by_name(
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
        let user = query_as!(
            User,
            r#"
            SELECT name, display_name, password, email, disabled, image
            FROM users
            SELECT name, display_name, password, email, disabled, picture
            FROM glyph_users
            WHERE name = $1
            "#,
            name
@@ -38,21 +38,21 @@ impl User {
    pub async fn upsert(pool: &PgPool, user: &Self) -> Result<(), Box<dyn Error + Send + Sync>> {
        query!(
            r#"
            INSERT INTO users (name, display_name, password, email, disabled, image)
            INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (name) DO UPDATE
            SET display_name = EXCLUDED.display_name,
                password = EXCLUDED.password,
                email = EXCLUDED.email,
                disabled = EXCLUDED.disabled,
                image = EXCLUDED.image
                picture = EXCLUDED.picture
            "#,
            user.name,
            user.display_name,
            user.password,
            user.email,
            user.disabled,
            user.image
            user.picture
        )
        .execute(pool)
        .await?;
@@ -60,13 +60,10 @@ impl User {
        Ok(())
    }

    pub async fn delete_by_name(
        pool: &PgPool,
        name: &str,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
    pub async fn delete(pool: &PgPool, name: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
        query!(
            r#"
            DELETE FROM users
            DELETE FROM glyph_users
            WHERE name = $1
            "#,
            name
@@ -77,14 +74,14 @@ impl User {
        Ok(())
    }

    pub async fn all_exist_by_names(
    pub async fn all_exist(
        pool: &PgPool,
        names: &[String],
    ) -> Result<bool, Box<dyn Error + Send + Sync>> {
        let row = query!(
            r#"
            SELECT COUNT(*) AS "count!"
            FROM users
            FROM glyph_users
            WHERE name = ANY($1)
            "#,
            names
@@ -105,12 +102,13 @@ pub struct UserWithGroups {
    #[serde(default)]
    pub disabled: bool,
    #[serde(default)]
    pub image: Option<String>,
    pub picture: Option<String>,
    #[serde(default)]
    pub groups: Vec<String>,
}

impl UserWithGroups {
    pub async fn select(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
    pub async fn select_all(pool: &PgPool) -> Result<Vec<Self>, Box<dyn Error + Send + Sync>> {
        let users = query_as!(
            UserWithGroups,
            r#"
@@ -120,11 +118,9 @@ impl UserWithGroups {
                u.password,
                u.email,
                u.disabled,
                u.image,
                COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!"
            FROM users u
            LEFT JOIN users_groups ug ON u.name = ug.user_name
            GROUP BY u.name, u.email, u.disabled, u.image
                u.picture,
                ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
            FROM glyph_users u
            "#
        )
        .fetch_all(pool)
@@ -133,7 +129,7 @@ impl UserWithGroups {
        Ok(users)
    }

    pub async fn select_by_name(
    pub async fn select(
        pool: &PgPool,
        name: &str,
    ) -> Result<Option<Self>, Box<dyn Error + Send + Sync>> {
@@ -146,12 +142,10 @@ impl UserWithGroups {
                u.password,
                u.email,
                u.disabled,
                u.image,
                COALESCE(array_agg(ug.group_name ORDER BY ug.group_name), ARRAY[]::TEXT[]) AS "groups!"
            FROM users u
            LEFT JOIN users_groups ug ON u.name = ug.user_name
                u.picture,
                ARRAY(SELECT ug.group_name FROM glyph_users_groups ug WHERE ug.user_name = u.name) AS "groups!"
            FROM glyph_users u
            WHERE u.name = $1
            GROUP BY u.name, u.email, u.disabled, u.image
            "#,
            name
        )
@@ -168,7 +162,8 @@ impl UserWithGroups {
        let mut tx = pool.begin().await?;

        query!(
            r#"INSERT INTO users (name, display_name, password, email, disabled, image)
            r#"
            INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
            VALUES ($1, $2, $3, $4, $5, $6)
            "#,
            user_with_groups.name,
@@ -176,14 +171,14 @@ impl UserWithGroups {
            user_with_groups.password,
            user_with_groups.email,
            user_with_groups.disabled,
            user_with_groups.image
            user_with_groups.picture
        )
        .execute(&mut *tx)
        .await?;

        query!(
            r#"
            INSERT INTO users_groups (user_name, group_name)
            INSERT INTO glyph_users_groups (user_name, group_name)
            SELECT * FROM UNNEST($1::text[], $2::text[])
            "#,
            &user_with_groups.groups,
@@ -196,4 +191,93 @@ impl UserWithGroups {

        Ok(())
    }

    pub async fn upsert_many_delete_remaining(
        pool: &PgPool,
        users_with_groups: &[Self],
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let mut tx = pool.begin().await?;

        for user in users_with_groups {
            query!(
                r#"
                INSERT INTO glyph_users (name, display_name, password, email, disabled, picture)
                VALUES ($1, $2, $3, $4, $5, $6)
                ON CONFLICT (name) DO UPDATE
                SET display_name = EXCLUDED.display_name,
                    password = EXCLUDED.password,
                    email = EXCLUDED.email,
                    disabled = EXCLUDED.disabled,
                    picture = EXCLUDED.picture
                "#,
                user.name,
                user.display_name,
                user.password,
                user.email,
                user.disabled,
                user.picture
            )
            .execute(&mut *tx)
            .await?;

            query!(
                r#"
                DELETE FROM glyph_users_groups
                WHERE user_name = $1
                "#,
                user.name
            )
            .execute(&mut *tx)
            .await?;

            if !user.groups.is_empty() {
                query!(
                    r#"
                    INSERT INTO glyph_users_groups (user_name, group_name)
                    SELECT * FROM UNNEST($1::text[], $2::text[])
                    "#,
                    &user.groups,
                    &vec![user.name.clone(); user.groups.len()]
                )
                .execute(&mut *tx)
                .await?;
            }
        }

        let users = users_with_groups
            .iter()
            .map(|user| user.name.clone())
            .collect::<Vec<_>>();

        query!(
            r#"
            DELETE FROM glyph_users
            WHERE name <> ALL($1)
            "#,
            &users
        )
        .execute(&mut *tx)
        .await?;

        let groups = users_with_groups
            .iter()
            .flat_map(|user| user.groups.iter().cloned())
            .collect::<HashSet<_>>()
            .into_iter()
            .collect::<Vec<_>>();

        query!(
            r#"
            DELETE FROM glyph_groups
            WHERE name <> ALL($1)
            "#,
            &groups
        )
        .execute(pool)
        .await?;

        tx.commit().await?;

        Ok(())
    }
}
@@ -35,7 +35,7 @@ pub async fn get_all(
    _: auth::User,
    extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
    let groups_with_users = models::groups::GroupWithUsers::select(&pg_pool)
    let groups_with_users = models::groups::GroupWithUsers::select_all(&pg_pool)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

@@ -52,7 +52,7 @@ pub async fn get(
    extract::Path(name): extract::Path<NonEmptyString>,
    extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
    let group_with_users = models::groups::GroupWithUsers::select_by_name(&pg_pool, name.as_str())
    let group_with_users = models::groups::GroupWithUsers::select(&pg_pool, name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;
@@ -71,7 +71,7 @@ pub async fn create(
    extract::State(pg_pool): extract::State<PgPool>,
    extract::Json(group_create): extract::Json<GroupCreate>,
) -> Result<impl IntoResponse, StatusCode> {
    if models::groups::Group::select_by_name(&pg_pool, group_create.name.as_str())
    if models::groups::Group::select(&pg_pool, group_create.name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .is_some()
@@ -85,7 +85,7 @@ pub async fn create(
        .map(|u| u.to_string())
        .collect::<Vec<_>>();

    if !models::users::User::all_exist_by_names(&pg_pool, &users)
    if !models::users::User::all_exist(&pg_pool, &users)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
    {
@@ -116,7 +116,7 @@ pub async fn update(
    extract::State(config): extract::State<Config>,
    extract::Json(group_update): extract::Json<GroupUpdate>,
) -> Result<impl IntoResponse, StatusCode> {
    let group = models::groups::Group::select_by_name(&pg_pool, name.as_str())
    let group = models::groups::Group::select(&pg_pool, name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;
@@ -126,7 +126,7 @@ pub async fn update(
    if let Some(users) = &group_update.users {
        let users = users.iter().map(ToString::to_string).collect::<Vec<_>>();

        if !models::users::User::all_exist_by_names(&pg_pool, &users)
        if !models::users::User::all_exist(&pg_pool, &users)
            .await
            .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        {
@@ -163,12 +163,12 @@ pub async fn delete(
        return Err(StatusCode::FORBIDDEN);
    }

    let group = models::groups::Group::select_by_name(&pg_pool, &name)
    let group = models::groups::Group::select(&pg_pool, &name)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;

    Group::delete_by_name(&pg_pool, &group.name)
    Group::delete(&pg_pool, &group.name)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
@@ -21,7 +21,7 @@ struct UserResponse {
    display_name: String,
    email: String,
    disabled: bool,
    image: Option<String>,
    picture: Option<String>,
    groups: Vec<String>,
}

@@ -31,7 +31,7 @@ impl From<models::users::UserWithGroups> for UserResponse {
            display_name: user.display_name,
            email: user.email,
            disabled: user.disabled,
            image: user.image,
            picture: user.picture,
            groups: user.groups,
        }
    }
@@ -43,7 +43,7 @@ pub async fn get_all(
    _: auth::User,
    extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
    let users_with_groups = models::users::UserWithGroups::select(&pg_pool)
    let users_with_groups = models::users::UserWithGroups::select_all(&pg_pool)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;

@@ -60,7 +60,7 @@ pub async fn get(
    extract::Path(name): extract::Path<NonEmptyString>,
    extract::State(pg_pool): extract::State<PgPool>,
) -> Result<impl IntoResponse, StatusCode> {
    let user_with_groups = models::users::UserWithGroups::select_by_name(&pg_pool, name.as_str())
    let user_with_groups = models::users::UserWithGroups::select(&pg_pool, name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;
@@ -74,7 +74,7 @@ pub struct UserCreate {
    displayname: NonEmptyString,
    email: NonEmptyString,
    disabled: bool,
    image: Option<NonEmptyString>,
    picture: Option<NonEmptyString>,
    groups: Vec<NonEmptyString>,
}

@@ -83,7 +83,7 @@ pub async fn create(
    extract::State(pg_pool): extract::State<PgPool>,
    extract::Json(user_create): extract::Json<UserCreate>,
) -> Result<impl IntoResponse, StatusCode> {
    if models::users::User::select_by_name(&pg_pool, user_create.name.as_str())
    if models::users::User::select(&pg_pool, user_create.name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .is_some()
@@ -97,7 +97,7 @@ pub async fn create(
        .map(|g| g.to_string())
        .collect::<Vec<_>>();

    if !models::groups::Group::all_exist_by_names(&pg_pool, &groups)
    if !models::groups::Group::all_exist(&pg_pool, &groups)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
    {
@@ -110,7 +110,7 @@ pub async fn create(
        password: generate_random_password_hash(),
        email: user_create.email.to_string(),
        disabled: user_create.disabled,
        image: user_create.image.map(|i| i.to_string()),
        picture: user_create.picture.map(|i| i.to_string()),
        groups,
    };

@@ -126,7 +126,7 @@ pub struct UserUpdate {
    display_name: Option<NonEmptyString>,
    email: Option<NonEmptyString>,
    disabled: Option<bool>,
    image: Option<NonEmptyString>,
    picture: Option<NonEmptyString>,
    groups: Option<Vec<NonEmptyString>>,
}

@@ -137,7 +137,7 @@ pub async fn update(
    extract::State(config): extract::State<Config>,
    extract::Json(user_update): extract::Json<UserUpdate>,
) -> Result<impl IntoResponse, StatusCode> {
    let user = models::users::User::select_by_name(&pg_pool, name.as_str())
    let user = models::users::User::select(&pg_pool, name.as_str())
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;
@@ -150,7 +150,7 @@ pub async fn update(
        .map(|g| g.to_string())
        .collect::<Vec<_>>();

    if !models::groups::Group::all_exist_by_names(&pg_pool, &groups)
    if !models::groups::Group::all_exist(&pg_pool, &groups)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
    {
@@ -183,7 +183,7 @@ pub async fn update(
            .map(|e| e.to_string())
            .unwrap_or(user.email),
        disabled: user_update.disabled.unwrap_or(user.disabled),
        image: user_update.image.map(|i| i.to_string()).or(user.image),
        picture: user_update.picture.map(|i| i.to_string()).or(user.picture),
    };

    models::users::User::upsert(&pg_pool, &user)
@@ -206,12 +206,12 @@ pub async fn delete(
        return Err(StatusCode::FORBIDDEN);
    }

    let user = models::users::User::select_by_name(&pg_pool, &name)
    let user = models::users::User::select(&pg_pool, &name)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?
        .ok_or(StatusCode::NOT_FOUND)?;

    models::users::User::delete_by_name(&pg_pool, &user.name)
    models::users::User::delete(&pg_pool, &user.name)
        .await
        .or(Err(StatusCode::INTERNAL_SERVER_ERROR))?;
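`all_exist` (formerly `all_exist_by_names`) collapses a list of names into one existence check. A rough sketch of how that shape can be implemented with sqlx follows. The `glyph_users` table and the query are assumptions, and the check presumes the input slice holds no duplicates:

// Sketch only: bulk existence check of the kind the create/update handlers use.
use sqlx::PgPool;

pub async fn all_exist(pool: &PgPool, names: &[String]) -> sqlx::Result<bool> {
    // COUNT(*) over `name = ANY($1)` equals the input length only if every
    // requested name exists (assuming `names` contains no duplicates).
    let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM glyph_users WHERE name = ANY($1)")
        .bind(names)
        .fetch_one(pool)
        .await?;

    Ok(count == names.len() as i64)
}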
202
src/state.rs
@@ -1,3 +1,5 @@
use std::{sync::Arc, time::Duration};

use async_redis_session::RedisSessionStore;
use axum::extract::FromRef;
use openidconnect::{
@@ -10,11 +12,14 @@ use openidconnect::{
    },
    reqwest,
};
use redis::{self, AsyncCommands};
use sqlx::{PgPool, postgres::PgPoolOptions};
use tokio::spawn;
use tokio::{process::Command, spawn, task::JoinHandle, time::sleep};

use crate::config::Config;
use crate::{
    config::{Args, Config},
    fuse::AutheliaFS,
    models,
};

pub type OAuthClient<
    HasAuthUrl = EndpointSet,
@@ -46,26 +51,44 @@
#[derive(Clone)]
pub struct State {
    pub config: Config,
    pub oauth_http_client: reqwest::Client,
    pub oauth_client: OAuthClient,
    pub pg_pool: PgPool,
    pub redis_client: redis::aio::MultiplexedConnection,
    pub filesystem: AutheliaFS,
    pub mount: Arc<JoinHandle<()>>,
    pub authelia: Arc<JoinHandle<()>>,
    pub oauth_http_client: reqwest::Client,
    pub oauth_client: OAuthClient,
    pub session_store: RedisSessionStore,
}

impl State {
    pub async fn from_config(config: Config) -> Self {
        let (oauth_http_client, oauth_client) = oauth_client(&config).await;
    pub async fn from_args(args: Args) -> Self {
        let config = Config::try_from(&args.config).unwrap();

        let pg_pool = pg_pool(&config).await;
        sqlx::migrate!("./migrations").run(&pg_pool).await.unwrap();
        config.admin.upsert(&pg_pool).await.unwrap();

        let redis_client = redis_client(&config).await;

        let (filesystem, mount) = fuse(&config, &pg_pool).await;
        let contents = models::authelia::Users::to_fuse(&pg_pool).await.unwrap();
        filesystem.store(contents).await.unwrap();

        let authelia = authelia(args.passthrough);

        let (oauth_http_client, oauth_client) = oauth_client(&config).await;
        let session_store = session_store(&config);

        Self {
            config,
            oauth_http_client,
            oauth_client,
            pg_pool,
            redis_client,
            filesystem,
            mount,
            authelia,
            oauth_http_client,
            oauth_client,
            session_store,
        }
    }
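For orientation, this is roughly how the reworked constructor might be driven from `main`; `Args::parse()` and `routes::router()` are assumed names used purely for illustration:

// Sketch only: wiring State::from_args into an axum application.
// Args::parse() and routes::router() are assumptions, not taken from this change.
#[tokio::main]
async fn main() {
    let args = Args::parse();

    // Builds the pool, runs migrations, mounts the FUSE filesystem, spawns
    // Authelia, and discovers the OIDC provider before the server starts.
    let state = State::from_args(args).await;

    let app = routes::router().with_state(state);

    let listener = tokio::net::TcpListener::bind("0.0.0.0:8080").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}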
@@ -77,18 +100,6 @@ impl FromRef<State> for Config {
    }
}

impl FromRef<State> for reqwest::Client {
    fn from_ref(state: &State) -> Self {
        state.oauth_http_client.clone()
    }
}

impl FromRef<State> for OAuthClient {
    fn from_ref(state: &State) -> Self {
        state.oauth_client.clone()
    }
}

impl FromRef<State> for PgPool {
    fn from_ref(state: &State) -> Self {
        state.pg_pool.clone()
@@ -101,42 +112,30 @@ impl FromRef<State> for redis::aio::MultiplexedConnection {
    }
}

impl FromRef<State> for AutheliaFS {
    fn from_ref(state: &State) -> Self {
        state.filesystem.clone()
    }
}

impl FromRef<State> for reqwest::Client {
    fn from_ref(state: &State) -> Self {
        state.oauth_http_client.clone()
    }
}

impl FromRef<State> for OAuthClient {
    fn from_ref(state: &State) -> Self {
        state.oauth_client.clone()
    }
}

impl FromRef<State> for RedisSessionStore {
    fn from_ref(state: &State) -> Self {
        state.session_store.clone()
    }
}
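These `FromRef` impls are what let handlers extract just the sub-state they need rather than the whole `State`. A minimal, hypothetical handler for illustration:

// Sketch only: with FromRef<State> in place, axum hands sub-state directly to handlers.
use axum::{extract, http::StatusCode, response::IntoResponse};
use sqlx::PgPool;

use crate::fuse::AutheliaFS;

async fn example_handler(
    extract::State(pg_pool): extract::State<PgPool>,
    extract::State(filesystem): extract::State<AutheliaFS>,
) -> Result<impl IntoResponse, StatusCode> {
    // ... query pg_pool, talk to the FUSE filesystem, etc. ...
    let _ = (&pg_pool, &filesystem);
    Ok(StatusCode::OK)
}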
async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) {
    let oauth_http_client = reqwest::ClientBuilder::new()
        .redirect(reqwest::redirect::Policy::none())
        .danger_accept_invalid_certs(config.oauth.insecure)
        .build()
        .unwrap();

    let provider_metadata = CoreProviderMetadata::discover_async(
        IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(),
        &oauth_http_client,
    )
    .await
    .unwrap();

    let oauth_client = OAuthClient::from_provider_metadata(
        provider_metadata,
        ClientId::new(config.oauth.client_id.clone()),
        Some(ClientSecret::new(config.oauth.client_secret.clone())),
    )
    .set_redirect_uri(
        RedirectUrl::new(format!(
            "{}{}/api/auth/callback",
            config.server.host, config.server.subpath
        ))
        .unwrap(),
    );

    (oauth_http_client, oauth_client)
}

async fn pg_pool(config: &Config) -> PgPool {
    PgPoolOptions::new()
        .max_connections(5)
@@ -159,43 +158,86 @@ async fn redis_client(config: &Config) -> redis::aio::MultiplexedConnection {
    );

    let client = redis::Client::open(url).unwrap();
    let mut connection = client.get_multiplexed_async_connection().await.unwrap();
    client.get_multiplexed_async_connection().await.unwrap()
}

    let _: () = redis::cmd("CONFIG")
        .arg("SET")
        .arg("notify-keyspace-events")
        .arg("Ex")
        .query_async(&mut connection)
        .await
async fn fuse(config: &Config, pg_pool: &PgPool) -> (AutheliaFS, Arc<JoinHandle<()>>) {
    let fs = AutheliaFS::new(
        config.fuse.clone(),
        Some(Box::new(models::authelia::Users::from_fuse)),
        pg_pool.clone(),
    )
    .await;

    let fs_clone = fs.clone();
    let mount = Arc::new(spawn(async move {
        loop {
            let _ = fs_clone.clone().run().await;
        }
    }));

    (fs, mount)
}

fn authelia(args: Vec<String>) -> Arc<JoinHandle<()>> {
    Arc::new(spawn(async move {
        loop {
            let _ = Command::new("authelia")
                .args(args.clone())
                .spawn()
                .unwrap()
                .wait()
                .await;
        }
    }))
}

async fn oauth_client(config: &Config) -> (reqwest::Client, OAuthClient) {
    let oauth_http_client = reqwest::ClientBuilder::new()
        .redirect(reqwest::redirect::Policy::none())
        .danger_accept_invalid_certs(config.oauth.insecure)
        .build()
        .unwrap();

    let database = config.redis.database.to_string();
    spawn(async move {
        let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
        let rconfig = redis::AsyncConnectionConfig::new().set_push_sender(tx);
        let mut connection = client
            .get_multiplexed_async_connection_with_config(&rconfig)
    let mut provider_metadata = None;

    let retries = 10;
    let mut backoff = Duration::from_secs(1);

    for i in 0..retries {
        if let Ok(metadata) = CoreProviderMetadata::discover_async(
            IssuerUrl::new(config.oauth.issuer_url.clone()).unwrap(),
            &oauth_http_client,
        )
        .await
            .unwrap();

        let channel = format!("__keyevent@{database}__:expired");
        connection.subscribe(&[channel]).await.unwrap();

        while let Some(msg) = rx.recv().await {
            if let Some(msg) = redis::Msg::from_push_info(msg) {
                if let Ok(key) = msg.get_payload::<String>() {
                    if !key.starts_with("invite:") {
                        continue;
        {
            provider_metadata = Some(metadata);
            break;
        }
        if i == retries - 1 {
            break;
        }

                    let id = key.trim_start_matches("invite:").to_string();
                    let _: i64 = connection.srem("invite:all", id).await.unwrap();
        sleep(backoff).await;
        backoff *= 2;
    }
                }
            }
    });

    connection
    let provider_metadata = provider_metadata.unwrap();

    let oauth_client = OAuthClient::from_provider_metadata(
        provider_metadata,
        ClientId::new(config.oauth.client_id.clone()),
        Some(ClientSecret::new(config.oauth.client_secret.clone())),
    )
    .set_redirect_uri(
        RedirectUrl::new(format!(
            "{}{}/api/auth/callback",
            config.server.host, config.server.subpath
        ))
        .unwrap(),
    );

    (oauth_http_client, oauth_client)
}

fn session_store(config: &Config) -> RedisSessionStore {
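The provider-discovery retry added above is written inline. The same idea expressed as a small generic helper, purely as a sketch of the pattern and not part of the codebase:

// Sketch only: the exponential-backoff pattern used for provider discovery,
// factored into a generic helper.
use std::{future::Future, time::Duration};

use tokio::time::sleep;

async fn retry_with_backoff<T, E, F, Fut>(mut retries: u32, mut delay: Duration, mut op: F) -> Result<T, E>
where
    F: FnMut() -> Fut,
    Fut: Future<Output = Result<T, E>>,
{
    loop {
        match op().await {
            Ok(value) => return Ok(value),
            Err(err) if retries == 0 => return Err(err),
            Err(_) => {
                retries -= 1;
                sleep(delay).await;
                delay *= 2;
            }
        }
    }
}

With such a helper the inline loop would reduce to something like `retry_with_backoff(10, Duration::from_secs(1), || discover()).await`, where `discover` is a closure performing the same `CoreProviderMetadata::discover_async` call.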
@@ -33,3 +33,20 @@ pub fn generate_random_password_hash() -> String {

    password_hash
}

pub fn hash_password(password: &str) -> String {
    let salt = SaltString::generate(&mut OsRng);

    let argon2 = Argon2::new(
        argon2::Algorithm::Argon2id,
        argon2::Version::V0x13,
        argon2::Params::new(65536, 3, 4, Some(32)).unwrap(),
    );

    let password_hash = argon2
        .hash_password(password.as_bytes(), &salt)
        .unwrap()
        .to_string();

    password_hash
}
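A hash produced by `hash_password` can be checked with the same crate's verifier, since the PHC string embeds the algorithm, version, and parameters (the sample `$argon2id$v=19$m=65536,t=3,p=4$...` digest later in this change uses exactly these parameters). A rough usage sketch:

// Sketch only: verifying a password against a hash produced by hash_password.
use argon2::{Argon2, PasswordHash, PasswordVerifier};

pub fn verify_password(password: &str, stored_hash: &str) -> bool {
    let Ok(parsed) = PasswordHash::new(stored_hash) else {
        return false;
    };

    // The parameters are read from the PHC string, so the default Argon2
    // context can verify hashes created with custom Params.
    Argon2::default()
        .verify_password(password.as_bytes(), &parsed)
        .is_ok()
}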
@@ -1 +1,21 @@
use tokio::{select, signal};

pub mod crypto;

pub async fn shutdown_signal() {
    let ctrl_c = async {
        signal::ctrl_c().await.unwrap();
    };

    let terminate = async {
        signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
            .unwrap()
            .recv()
            .await;
    };

    select! {
        () = ctrl_c => {},
        () = terminate => {},
    }
}
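`shutdown_signal` is shaped for axum's graceful-shutdown hook. A hypothetical call site inside an async `main`, assuming an axum 0.7-style `serve` API:

// Sketch only: wiring shutdown_signal into graceful shutdown.
// `app` is a placeholder Router built elsewhere.
let listener = tokio::net::TcpListener::bind("0.0.0.0:8080").await.unwrap();
axum::serve(listener, app)
    .with_graceful_shutdown(crate::utils::shutdown_signal())
    .await
    .unwrap();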
@@ -18,11 +18,11 @@ COPY .sqlx ./.sqlx
RUN cargo build $(if [ "$BUILD_MODE" = "release" ]; then echo "--release"; else echo ""; fi)
RUN mkdir -p build && cp target/$(if [ "$BUILD_MODE" = "release" ]; then echo "release"; else echo "debug"; fi)/glyph build/glyph

FROM docker.io/library/debian:bookworm-slim
FROM docker.io/authelia/authelia

COPY --from=builder /app/build/glyph /usr/local/bin/glyph
COPY --from=builder /app/build/glyph /usr/bin/glyph
COPY --from=builder /usr/lib/x86_64-linux-gnu/libfuse3.so.3 /usr/lib/x86_64-linux-gnu/libfuse3.so.3
COPY --from=builder /usr/lib/x86_64-linux-gnu/libgcc_s.so.1 /usr/lib/x86_64-linux-gnu/libgcc_s.so.1

EXPOSE 8080/tcp

ENTRYPOINT ["/usr/local/bin/glyph"]
ENTRYPOINT ["/usr/bin/glyph"]
CMD ["--help"]
@@ -6,11 +6,21 @@ spec:
  containers:
    - name: glyph
      image: registry.karaolidis.com/karaolidis/glyph:latest
      securityContext:
        privileged: true
        capabilities:
          add:
            - SYS_ADMIN
      resources:
        limits:
          podman.io/device=/dev/fuse: 1
      volumeMounts:
        - name: glyph-config
          mountPath: /etc/glyph
        - name: authelia-users
          mountPath: /etc/authelia/users
        - name: authelia-config
          mountPath: /etc/authelia/config
        - name: authelia-storage
          mountPath: /var/lib/authelia
      command:
        [
          "glyph",
@@ -18,6 +28,9 @@ spec:
          "/etc/glyph/default.yml",
          --log-config,
          "/etc/glyph/log4rs.yml",
          "--",
          "--config",
          "/etc/authelia/config/configuration.yml",
        ]

    - name: postgresql
@@ -36,22 +49,6 @@ spec:
    - name: redis
      image: docker.io/library/redis:latest

    - name: authelia
      image: docker.io/authelia/authelia:latest
      volumeMounts:
        - name: authelia-config
          mountPath: /etc/authelia
        - name: authelia-users
          mountPath: /etc/authelia/users
        - name: authelia-storage
          mountPath: /var/lib/authelia
      command:
        [
          "/bin/sh",
          "-c",
          "cp /etc/authelia/users.yml /etc/authelia/users/users.yml && exec authelia --config /etc/authelia/configuration.yml",
        ]

    - name: traefik
      image: docker.io/library/traefik:latest
      args:
@@ -72,13 +69,11 @@ spec:
    - name: authelia-config
      configMap:
        name: authelia-config
    - name: authelia-users
      emptyDir: {}
    - name: authelia-storage
      emptyDir: {}
    - name: traefik-config
      configMap:
        name: traefik-config
    - name: authelia-storage
      emptyDir: {}
---
apiVersion: v1
kind: ConfigMap
@@ -89,13 +84,6 @@ data:
    server:
      host: https://app.glyph.local

    database:
      host: postgresql
      port: 5432
      user: glyph
      password: glyph
      database: glyph

    oauth:
      issuer_url: https://id.glyph.local
      client_id: glyph
@@ -103,13 +91,27 @@ data:
      admin_group: admins
      insecure: true

    authelia:
      user_database: /etc/authelia/users/users.yml
    fuse:
      mount_directory: /etc/authelia/users
      user_database_name: users.yml

    postgresql:
      host: postgresql
      port: 5432
      user: glyph
      password: glyph
      database: glyph

    redis:
      host: redis
      port: 6379

    admin:
      name: glyph
      display_name: Glyph
      password: glyph
      email: glyph@karaolidis.com

  log4rs.yml: |
    appenders:
      stdout:
@@ -198,13 +200,6 @@ data:
            redirect_uris:
              - "https://app.glyph.local/api/auth/callback"
            authorization_policy: "one_factor"
  users.yml: |
    users:
      glyph:
        displayname: "glyph"
        password: "$argon2id$v=19$m=65536,t=3,p=4$lobLBhv2SKyVZZZCl+e8Lg$VzPmcTksXBNlJfeztMUqMDgdU47qT5bB1Gk+QHigASQ" # The digest of 'glyph'.
        groups:
          - "admins"
---
apiVersion: v1
kind: ConfigMap
@@ -232,7 +227,7 @@ data:
    authelia-service:
      loadBalancer:
        servers:
          - url: "http://authelia:9091"
          - url: "http://glyph:9091"

    glyph-service:
      loadBalancer: