Got ldap on nutboy3 and jabber on legatus
This commit is contained in:
parent c87448ff13
commit 541890c08f
@@ -111,11 +111,18 @@ in {
 description = "Map of host to domains to domain options.";
 default = { };
 };
+
+challenge-path = mkOption {
+type = str;
+description = "Web-accessible path for responding to ACME challenges.";
+default = "/run/fudo-acme/challenge";
+};
 };

 config = {
 security.acme.certs = mapAttrs (domain: domainOpts: {
 email = domainOpts.admin-email;
+webroot = cfg.challenge-path;
 extraDomainNames = domainOpts.extra-domains;
 }) localDomains;

@@ -130,12 +137,14 @@ in {
 recommendedProxySettings = true;

 virtualHosts.${config.instance.host-fqdn} = {
-enableACME = true;
-forceSSL = true;
-# Just...force override if you want this to point somewhere.
-locations."/" = {
-return = "403 Forbidden";
+serverAliases = attrNames localDomains;
+locations = {
+"/.well-known/acme-challenge" = {
+root = cfg.challenge-path;
+};
+"/" = {
+return = "301 https://$host$request_uri";
+};
 };
 };
 };
@@ -156,7 +165,9 @@ in {
 copyOpts.chain
 copyOpts.private-key
 ]) copies;
-in unique (concatMap (i: unique i) copy-paths);
+in (unique (concatMap (i: unique i) copy-paths)) ++ [
+"d \"${cfg.challenge-path}\" 755 acme nginx - -"
+];

 services = concatMapAttrs (domain: domainOpts:
 concatMapAttrs (copy: copyOpts: let
@@ -55,6 +55,8 @@ let
 in {
 options.fudo.backplane = with types; {

+enable = mkEnableOption "Enable backplane (jabber) server on this host.";
+
 client-hosts = mkOption {
 type = attrsOf (submodule clientHostOpts);
 description = "List of backplane client options.";
@@ -67,7 +69,7 @@ in {
 default = {};
 };

-backplane-host = mkOption {
+backplane-hostname = mkOption {
 type = types.str;
 description = "Hostname of the backplane XMPP server.";
 };
@@ -86,7 +86,7 @@ in {
 partOf = [ "backplane-dns.target" ];
 requires = cfg.required-services ++ [ "postgresql.service" ];
 environment = {
-FUDO_DNS_BACKPLANE_XMPP_HOSTNAME = backplane-cfg.backplane-host;
+FUDO_DNS_BACKPLANE_XMPP_HOSTNAME = backplane-cfg.backplane-hostname;
 FUDO_DNS_BACKPLANE_XMPP_USERNAME = cfg.backplane-role.role;
 FUDO_DNS_BACKPLANE_XMPP_PASSWORD_FILE = cfg.backplane-role.password-file;
 FUDO_DNS_BACKPLANE_DATABASE_HOSTNAME = cfg.database.host;
@@ -2,13 +2,13 @@

 with lib;
 {
-config = mkIf config.fudo.jabber.enable {
+config = mkIf config.fudo.backplane.enable {
 fudo = let
 cfg = config.fudo.backplane;

 hostname = config.instance.hostname;

-backplane-server = cfg.backplane-host;
+backplane-server = cfg.backplane-hostname;

 generate-auth-file = name: files: let
 make-entry = name: passwd-file:
@@ -40,6 +40,8 @@ with lib;
 };

 jabber = {
+enable = true;
+
 environment = {
 FUDO_HOST_PASSWD_FILE =
 secrets.backplane-host-auth.target-file;
@@ -21,6 +21,18 @@ in {
 example = "My Fancy Chat Site";
 };

+user = mkOption {
+type = str;
+description = "System user as which to run the server.";
+default = "mattermost";
+};
+
+group = mkOption {
+type = str;
+description = "System group as which to run the server.";
+default = "mattermost";
+};
+
 smtp = {
 server = mkOption {
 type = str;
@@ -111,8 +123,6 @@ in {
 };
 mattermost-config-file-template =
 pkgs.writeText "mattermost-config.json.template" (builtins.toJSON modified-config);
-mattermost-user = "mattermost";
-mattermost-group = "mattermost";

 generate-mattermost-config = target: template: smtp-passwd-file: db-passwd-file:
 pkgs.writeScript "mattermost-config-generator.sh" ''
@@ -124,13 +134,12 @@ in {
 in {
 users = {
 users = {
-${mattermost-user} = {
+${cfg.user} = {
 isSystemUser = true;
 group = mattermost-group;
 };
 };
-groups = { ${mattermost-group} = { members = [ mattermost-user ]; }; };
+groups.${cfg.group}.members = [ cfg.user ];
 };

 fudo.system.services.mattermost = {
@@ -146,62 +155,33 @@ in {
 cfg.database.password-file}
 cp ${cfg.smtp.password-file} ${cfg.state-directory}/config/config.json
 cp -uRL ${pkg}/client ${cfg.state-directory}
-chown ${mattermost-user}:${mattermost-group} ${cfg.state-directory}/client
+chown ${cfg.user}:${cfg.group} ${cfg.state-directory}/client
 chmod 0750 ${cfg.state-directory}/client
 '';
 execStart = "${pkg}/bin/mattermost";
 workingDirectory = cfg.state-directory;
-user = mattermost-user;
-group = mattermost-group;
+user = cfg.user;
+group = cfg.group;
 };

 systemd = {

 tmpfiles.rules = [
-"d ${cfg.state-directory} 0750 ${mattermost-user} ${mattermost-group} - -"
-"d ${cfg.state-directory}/config 0750 ${mattermost-user} ${mattermost-group} - -"
+"d ${cfg.state-directory} 0750 ${cfg.user} ${cfg.group} - -"
+"d ${cfg.state-directory}/config 0750 ${cfg.user} ${cfg.group} - -"
 "L ${cfg.state-directory}/bin - - - - ${pkg}/bin"
 "L ${cfg.state-directory}/fonts - - - - ${pkg}/fonts"
 "L ${cfg.state-directory}/i18n - - - - ${pkg}/i18n"
 "L ${cfg.state-directory}/templates - - - - ${pkg}/templates"
 ];

-# services.mattermost = {
-# description = "Mattermost Chat Server";
-# wantedBy = [ "multi-user.target" ];
-# after = [ "network.target" ];
-
-# preStart = ''
-# ${generate-mattermost-config
-# mattermost-config-target
-# mattermost-config-file-template
-# cfg.smtp.password-file
-# cfg.database.password-file}
-# cp ${cfg.smtp.password-file} ${cfg.state-directory}/config/config.json
-# cp -uRL ${pkg}/client ${cfg.state-directory}
-# chown ${mattermost-user}:${mattermost-group} ${cfg.state-directory}/client
-# chmod 0750 ${cfg.state-directory}/client
-# '';
-
-# serviceConfig = {
-# PermissionsStartOnly = true;
-# ExecStart = "${pkg}/bin/mattermost";
-# WorkingDirectory = cfg.state-directory;
-# Restart = "always";
-# RestartSec = "10";
-# LimitNOFILE = "49152";
-# User = mattermost-user;
-# Group = mattermost-group;
-# };
-# };
 };

 services.nginx = {
 enable = true;

 appendHttpConfig = ''
 proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=mattermost_cache:10m max_size=3g inactive=120m use_temp_path=off;
 '';

 virtualHosts = {
 "${cfg.hostname}" = {
@@ -212,48 +192,48 @@ in {
 proxyPass = "http://127.0.0.1:8065";

 extraConfig = ''
 client_max_body_size 50M;
 proxy_set_header Connection "";
 proxy_set_header Host $host;
 proxy_set_header X-Real-IP $remote_addr;
 proxy_set_header X-Forwarded-By $server_addr:$server_port;
 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
 proxy_set_header X-Forwarded-Proto $scheme;
 proxy_set_header X-Frame-Options SAMEORIGIN;
 proxy_buffers 256 16k;
 proxy_buffer_size 16k;
 proxy_read_timeout 600s;
 proxy_cache mattermost_cache;
 proxy_cache_revalidate on;
 proxy_cache_min_uses 2;
 proxy_cache_use_stale timeout;
 proxy_cache_lock on;
 proxy_http_version 1.1;
 '';
 };

 locations."~ /api/v[0-9]+/(users/)?websocket$" = {
 proxyPass = "http://127.0.0.1:8065";

 extraConfig = ''
 proxy_set_header Upgrade $http_upgrade;
 proxy_set_header Connection "upgrade";
 client_max_body_size 50M;
 proxy_set_header Host $host;
 proxy_set_header X-Real-IP $remote_addr;
 proxy_set_header X-Forwarded-By $server_addr:$server_port;
 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
 proxy_set_header X-Forwarded-Proto $scheme;
 proxy_set_header X-Frame-Options SAMEORIGIN;
 proxy_buffers 256 16k;
 proxy_buffer_size 16k;
 client_body_timeout 60;
 send_timeout 300;
 lingering_timeout 5;
 proxy_connect_timeout 90;
 proxy_send_timeout 300;
 proxy_read_timeout 90s;
 '';
 };
 };
 };
@@ -76,7 +76,7 @@ in {
 "${cfg.user}" = {
 isSystemUser = true;
 createHome = true;
-home = "/var/home/${cfg.user}";
+home = "/run/home/${cfg.user}";
 group = cfg.user;
 };
 };
@@ -90,8 +90,8 @@ in {

 systemd = {
 tmpfiles.rules = [
-"d /var/home 755 root - - -"
-"d /var/home/${cfg.user} 700 ${cfg.user} - - -"
+"d /run/home 755 root - - -"
+"d /run/home/${cfg.user} 700 ${cfg.user} - - -"
 ];

 timers.backplane-dns-client = {
@@ -3,6 +3,8 @@
 with lib;
 let
 hostname = config.instance.hostname;

+host-secrets = config.fudo.secrets.host-secrets.${hostname};
+
 siteOpts = { ... }: with types; {
 options = {
@@ -45,30 +47,34 @@ let
 loglevel = cfg.log-level;

 access_rules = {
-c2s = { allow = "all"; };
-announce = { allow = "admin"; };
-configure = { allow = "admin"; };
-pubsub_createnode = { allow = "local"; };
+c2s.allow = "all";
+announce.allow = "admin";
+configure.allow = "admin";
+pubsub_createnode.allow = "admin";
 };

-acl = {
-admin = {
-user = concatMap
-(admin: map (site: "${admin}@${site}")
-(attrNames cfg.sites))
-cfg.admins;
-};
-};
+acl.admin = {
+user = concatMap
+(admin: map (site: "${admin}@${site}")
+(attrNames cfg.sites))
+cfg.admins;
+};

 hosts = attrNames cfg.sites;

-listen = map (ip: {
-port = cfg.port;
-module = "ejabberd_c2s";
-ip = ip;
-starttls = true;
-starttls_required = true;
-}) cfg.listen-ips;
+# By default, listen on all ips
+listen = let
+common = {
+port = cfg.port;
+module = "ejabberd_c2s";
+starttls = true;
+starttls_required = true;
+};
+in
+if (cfg.listen-ips != null) then
+map (ip: { ip = ip; } // common)
+cfg.listen-ips
+else [ common ];

 certfiles = concatMapAttrsToList
 (site: siteOpts:
@@ -86,27 +92,33 @@ let
 in pkgs.writeText "ejabberd.config.yml.template" config-file;

 enter-secrets = template: secrets: target: let
-secret-readers = concatStringsSep "\n"
-(mapAttrsToList
-(secret: file: "${secret}=$(cat ${file})")
-secrets);
 secret-swappers = map
 (secret: "sed s/${secret}/\$${secret}/g")
-(attrNames secrets);
+secrets;
 swapper = concatStringsSep " | " secret-swappers;
 in pkgs.writeShellScript "ejabberd-generate-config.sh" ''
+[ -f \$${target} ] && rm -f ${target}
+echo "Copying from ${template} to ${target}"
+touch ${target}
+chmod go-rwx ${target}
+chmod u+rw ${target}
 cat ${template} | ${swapper} > ${target}
+echo "Copying from ${template} to ${target} completed"
 '';

 cfg = config.fudo.jabber;

+log-dir = "${cfg.state-directory}/logs";
+spool-dir = "${cfg.state-directory}/spool";
+
 in {
 options.fudo.jabber = with types; {
 enable = mkEnableOption "Enable ejabberd server.";

 listen-ips = mkOption {
-type = listOf str;
+type = nullOr (listOf str);
 description = "IPs on which to listen for Jabber connections.";
+default = null;
 };

 port = mkOption {
@@ -147,7 +159,7 @@ in {
 config-file = mkOption {
 type = str;
 description = "Location at which to generate the configuration file.";
-default = "/run/ejabberd/ejabberd.yaml";
+default = "/run/ejabberd/config/ejabberd.yaml";
 };

 log-level = mkOption {
@@ -160,6 +172,12 @@ in {
 default = 3;
 };

+state-directory = mkOption {
+type = str;
+description = "Path at which to store ejabberd state.";
+default = "/var/lib/ejabberd";
+};
+
 environment = mkOption {
 type = attrsOf str;
 description = "Environment variables to set for the ejabberd daemon.";
@@ -187,27 +205,38 @@ in {
 };
 }) cfg.sites;

-system = {
-services.ejabberd-config-generator = let
-config-generator =
-enter-secrets config-file-template cfg.secret-files cfg.config-file;
-in {
-script = "${config-generator}";
-readWritePaths = [ config-dir ];
-workingDirectory = config-dir;
-user = cfg.user;
-description = "Generate ejabberd config file with necessary passwords.";
-postStart = ''
-chown ${cfg.user} ${cfg.config-file}
-chmod 0400 ${cfg.config-file}
-'';
-};
+secrets.host-secrets.${hostname}.ejabberd-password-env = let
+env-vars = mapAttrsToList (secret: file: "${secret}=${readFile file}")
+cfg.secret-files;
+in {
+source-file = pkgs.writeText "ejabberd-password-env"
+(concatStringsSep "\n" env-vars);
+target-file = "/run/ejabberd/environment/config-passwords.env";
+user = cfg.user;
 };

+# system = {
+# services.ejabberd-config-generator = let
+# config-generator =
+# enter-secrets config-file-template cfg.secret-files cfg.config-file;
+# in {
+# script = "${config-generator}";
+# readWritePaths = [ config-dir ];
+# workingDirectory = config-dir;
+# user = cfg.user;
+# description = "Generate ejabberd config file with necessary passwords.";
+# postStart = ''
+# chown ${cfg.user}:${cfg.group} ${cfg.config-file}
+# chmod 0400 ${cfg.config-file}
+# '';
+# };
+# };
 };

 systemd = {
 tmpfiles.rules = [
-"d '${config-dir}' 0700 ${cfg.user} ${cfg.group} - -'"
+"d ${config-dir} 0700 ${cfg.user} ${cfg.group} - -"
+"d ${cfg.state-directory} 0750 ${cfg.user} ${cfg.group} - -"
 ];

 services = {
@@ -216,6 +245,22 @@ in {
 requires = [ "ejabberd-config-generator.service" ];
 environment = cfg.environment;
 };
+
+ejabberd-config-generator = let
+config-generator =
+enter-secrets config-file-template (attrNames cfg.secret-files) cfg.config-file;
+in {
+description = "Generate ejabberd config file containing passwords.";
+serviceConfig = {
+User = cfg.user;
+ExecStart = "${config-generator}";
+ExecStartPost = pkgs.writeShellScript "protect-ejabberd-config.sh" ''
+chown ${cfg.user}:${cfg.group} ${cfg.config-file}
+chmod 0400 ${cfg.config-file}
+'';
+EnvironmentFile = host-secrets.ejabberd-password-env.target-file;
+};
+};
 };
 };

@@ -226,6 +271,9 @@ in {
 group = cfg.group;

 configFile = cfg.config-file;
+
+logsDir = log-dir;
+spoolDir = spool-dir;
 };
 };
 }
@@ -364,12 +364,6 @@ in {
 attrs = {
 objectClass = [ "olcDatabaseConfig" "olcFrontendConfig" ];
 olcDatabase = "{-1}frontend";
-olcAccess = makeAccess {
-"*" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
-"*" = "none";
-};
-};
 };
 };
 "olcDatabase={0}config" = {
@@ -378,7 +372,6 @@ in {
 olcDatabase = "{0}config";
 olcAccess = makeAccess {
 "*" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
 "*" = "none";
 };
 };
@@ -392,36 +385,33 @@ in {
 # olcRootDN = "cn=admin,${cfg.base}";
 # olcRootPW = FIXME; # NOTE: this should be hashed...
 olcDbDirectory = "${cfg.state-directory}/database";
-olcDbIndex = [ "objectClass eq" "uid eq" ];
+olcDbIndex = [ "objectClass eq" "uid pres,eq" ];
 olcAccess = makeAccess {
 "attrs=userPassword,shadowLastChange" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
+# "dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
 "dn.exact=cn=auth_reader,${cfg.base}" = "read";
-"dn.exact=cn=replicator,${cfg.base}" = "read";
-"self" = "write";
 "*" = "auth";
 };
 "dn=cn=admin,ou=groups,${cfg.base}" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
-"users" = "read";
-"*" = "none";
+# "dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
+"anonymous" = "auth";
+"dn.children=dc=fudo,dc=org" = "read";
 };
 "dn.subtree=ou=groups,${cfg.base} attrs=memberUid" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
-"dn.regex=cn=[a-zA-Z][a-zA-Z0-9_]+,ou=hosts,${cfg.base}" = "write";
-"users" = "read";
-"*" = "none";
+# "dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
+# "dn.regex=cn=[a-zA-Z][a-zA-Z0-9_]+,ou=hosts,${cfg.base}" = "write";
+"anonymous" = "auth";
+"dn.children=dc=fudo,dc=org" = "read";
 };
 "dn.subtree=ou=members,${cfg.base} attrs=cn,sn,homeDirectory,loginShell,gecos,description,homeDirectory,uidNumber,gidNumber" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
-"dn.exact=cn=user_db_reader,${cfg.base}" = "read";
-"users" = "read";
-"*" = "none";
+# "dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
+"anonymous" = "auth";
+"dn.children=dc=fudo,dc=org" = "read";
 };
 "*" = {
-"dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
-"users" = "read";
-"*" = "none";
+# "dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth" = "manage";
+"anonymous" = "auth";
+"dn.children=dc=fudo,dc=org" = "read";
 };
 };
 };
@@ -1,8 +1,28 @@
 { config, lib, pkgs, ... }:

 with lib;
-{
+let
+cfg = config.fudo.ssh;
+hostname = config.instance.hostname;
+
+in {
+options.fudo.ssh = with types; {
+whitelistIPs = mkOption {
+type = listOf str;
+description =
+"IPs to which fail2ban rules will not apply (on top of local networks).";
+default = [];
+};
+};
+
 config = {
+services.fail2ban = {
+ignoreIP =
+config.instance.local-networks ++ cfg.whitelistIPs;
+maxretry = if config.fudo.hosts.${hostname}.hardened then 3
+else 20;
+};
+
 programs.ssh.knownHosts = let
 keyed-hosts =
 filterAttrs (h: o: o.ssh-pubkeys != [])