diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2 index d2387c033..8ef005da4 100644 --- a/roles/haproxy/templates/haproxy_backend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_backend.cfg.j2 @@ -67,3 +67,18 @@ {% endfor %} {% endif %} {% endfor %} + +{% if haproxy_ldap_servers is defined %} +#--------------------------------------------------------------------- +# ldap backend +#--------------------------------------------------------------------- +backend ldap_servers + mode tcp + option tcpka + + option ldap-check + + {% for server in haproxy_ldap_servers -%} + server {{server.label}} {{server.ip}}:{{server.port}} ssl verify none check weight 10 {% if loop.index==1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %} + {% endfor %} +{% endif %} diff --git a/roles/haproxy/templates/haproxy_frontend.cfg.j2 b/roles/haproxy/templates/haproxy_frontend.cfg.j2 index 6082e9c03..4909a0074 100644 --- a/roles/haproxy/templates/haproxy_frontend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_frontend.cfg.j2 @@ -12,8 +12,8 @@ frontend stats # ------------------------------------------------------------------- frontend internet_ip - bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent - bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent bind {{ haproxy_sni_ip.ipv4 }}:80 transparent bind {{ haproxy_sni_ip.ipv6 }}:80 transparent # Logging is done in the local_ip backend, otherwise all requests are logged twice @@ -30,7 +30,7 @@ frontend internet_ip http-request redirect scheme https code 301 if !{ ssl_fc } # Log the user agent in the httplogs capture request header User-agent len 256 - # Put the useragent header in a variable, shared between request and response. + # Put the useragent header in a variable, shared between request and response. 
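A note on the new `ldap_servers` backend above: the loop marks the first listed host as the active server and every later host as a `backup`, and `on-marked-up shutdown-backup-sessions` evicts clients from a backup as soon as the primary is healthy again. A minimal render sketch using the `jinja2` library, with made-up labels and addresses (the real list comes from `haproxy_ldap_servers`), shows the lines the template emits:

```python
from jinja2 import Template

# Hypothetical inventory values; the real list is haproxy_ldap_servers.
servers = [
    {"label": "ldap1", "ip": "10.0.0.11", "port": 636},
    {"label": "ldap2", "ip": "10.0.0.12", "port": 636},
]

template = Template(
    "{% for server in servers -%}\n"
    "server {{ server.label }} {{ server.ip }}:{{ server.port }}"
    " ssl verify none check weight 10 "
    "{% if loop.index == 1 %}on-marked-up shutdown-backup-sessions"
    "{% else %}backup{% endif %}\n"
    "{% endfor %}"
)
print(template.render(servers=servers))
# server ldap1 10.0.0.11:636 ssl verify none check weight 10 on-marked-up shutdown-backup-sessions
# server ldap2 10.0.0.12:636 ssl verify none check weight 10 backup
```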
http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -51,7 +51,7 @@ frontend internet_ip http-response replace-header Set-Cookie (?i)(^(?!.*samesite).*$) \1;\ SameSite=None if !no_same_site_uas # Remove an already present SameSite cookie attribute for unsupported browsers http-response replace-value Set-Cookie (^.*)(?i);\ *SameSite=(Lax|Strict|None)(.*$) \1\3 if no_same_site_uas - # Log whether the no_same_site_uas ACL has been hit + # Log whether the no_same_site_uas ACL has been hit http-request set-header samesitesupport samesite_notsupported if no_same_site_uas http-request set-header samesitesupport samesite_supported if !no_same_site_uas # We need a dummy backend in order to be able to rewrite the loadbalancer cookies @@ -66,7 +66,7 @@ frontend local_ip acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsunrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -82,7 +82,7 @@ frontend local_ip http-request capture sc_http_req_rate(0) len 4 # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. These are in a seperate mapfile @@ -96,7 +96,7 @@ frontend local_ip http-request deny if ! 
valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -111,8 +111,8 @@ frontend local_ip ## ------------------------------------------------------------------- frontend internet_restricted_ip - bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent - bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent bind {{ haproxy_sni_ip_restricted.ipv4 }}:80 transparent bind {{ haproxy_sni_ip_restricted.ipv6 }}:80 transparent # Logging is done in the local_ip_restriced backend, otherwise all requests are logged twice @@ -128,8 +128,8 @@ frontend internet_restricted_ip # We redirect all port 80 to port 443 http-request redirect scheme https code 301 if !{ ssl_fc } # Log the user agent in the httplogs - capture request header User-agent len 256 - # Put the useragent header in a variable, shared between request and response. + capture request header User-agent len 256 + # Put the useragent header in a variable, shared between request and response. 
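Both the public and the restricted frontends enforce the same two limits: a per-source-IP request rate over 10 s (tracked as sc0) and a per-path-plus-IP rate over 1 m (tracked as sc1 in the st_httpreqs_per_ip_and_path table), each answered with a 429 unless the source is allowlisted. As a rough mental model only (HAProxy stick tables are not implemented this way), the bookkeeping behaves like a sliding-window counter per key:

```python
import time
from collections import defaultdict, deque

class RateTable:
    """Toy stand-in for a stick table with an http_req_rate(period) counter."""

    def __init__(self, period_s: float):
        self.period_s = period_s
        self.hits: dict[str, deque] = defaultdict(deque)

    def track(self, key: str) -> int:
        """Record one request for key and return its rate over the period."""
        now = time.monotonic()
        window = self.hits[key]
        window.append(now)
        while window and now - window[0] > self.period_s:
            window.popleft()
        return len(window)

per_ip = RateTable(period_s=10)        # per-source-IP limit, 10 s window
per_ip_path = RateTable(period_s=60)   # per path+IP limit, 1 m window

def allowed(src: str, path: str, max_ip: int, max_ip_path: int) -> bool:
    # Mirrors the two http-request deny rules: exceeding either limit
    # results in a 429 (unless the allowlist ACL matched).
    return (per_ip.track(src) <= max_ip
            and per_ip_path.track(f"{path}|{src}") <= max_ip_path)
```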
http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -155,12 +155,12 @@ frontend internet_restricted_ip # frontend restricted ip addresses localhost # traffic coming back from the dummy backend ends up here # ------------------------------------------------------------------- -frontend localhost_restricted +frontend localhost_restricted bind 127.0.0.1:82 accept-proxy acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -177,7 +177,7 @@ frontend localhost_restricted # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. These are in a seperate mapfile @@ -191,7 +191,7 @@ frontend localhost_restricted http-request deny if ! valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -201,3 +201,19 @@ frontend localhost_restricted http-request redirect location %[base,map_reg(/etc/haproxy/maps/redirects.map)] if { base,map_reg(/etc/haproxy/maps/redirects.map) -m found } {% endif %} + +{% if haproxy_ldap_servers is defined %} +#-------------------------------------------------------------------- +# frontend public ips ldap +# ------------------------------------------------------------------- +listen ldap + mode tcp + no option dontlognull + option tcplog + option logasap + timeout client 900s + timeout server 901s + bind {{ haproxy_sni_ip.ipv4 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + bind {{ haproxy_sni_ip.ipv6 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + use_backend ldap_servers +{% endif %} diff --git a/roles/mailpit/defaults/main.yml b/roles/mailpit/defaults/main.yml new file mode 100644 index 000000000..7647de9dc --- /dev/null +++ b/roles/mailpit/defaults/main.yml @@ -0,0 +1,8 @@ +--- +mailpit: "{{ mailpit_defaults | combine(mailpit_overrides, recursive=true) }}" + +mailpit_defaults: + image: "axllent/mailpit" + hostname: "mailpit.{{ base_domain }}" 
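The mailpit defaults above open with the `{{ mailpit_defaults | combine(mailpit_overrides, recursive=true) }}` pattern that the other new roles repeat. In case the semantics are unclear: a recursive combine deep-merges nested dicts and lets override keys win. A small Python model of that behaviour, with an invented override value:

```python
def combine(defaults: dict, overrides: dict) -> dict:
    """Rough model of Jinja's combine(..., recursive=true) filter."""
    merged = dict(defaults)
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = combine(merged[key], value)  # deep-merge nested dicts
        else:
            merged[key] = value                        # override wins
    return merged

mailpit_defaults = {"image": "axllent/mailpit", "hostname": "mailpit.example.org"}
mailpit_overrides = {"hostname": "mail.internal.example.org"}  # hypothetical
print(combine(mailpit_defaults, mailpit_overrides))
# {'image': 'axllent/mailpit', 'hostname': 'mail.internal.example.org'}
```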
+ user: "mailpit" + group: "mailpit" diff --git a/roles/mailpit/tasks/main.yml b/roles/mailpit/tasks/main.yml new file mode 100644 index 000000000..7fb32d8a7 --- /dev/null +++ b/roles/mailpit/tasks/main.yml @@ -0,0 +1,37 @@ +--- +- name: "Create mailpit group" + group: + name: "{{ mailpit.group }}" + state: "present" + register: "result" + +- name: "Create mailpit user" + user: + name: "{{ mailpit.user }}" + group: "{{ mailpit.group }}" + comment: "User to run Mailpit service" + shell: "/bin/false" + password: "!" + create_home: false + state: "present" + register: "result" + +- name: "Save mailpit user uid" + set_fact: + mailpit_user_uid: "{{ result.uid }}" + +- name: "Create mailpit container" + docker_container: + name: "mailpit" + image: "{{ mailpit.image }}" + restart_policy: "always" + state: "started" + user: "{{ mailpit_user_uid }}" + ports: + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.http.routers.mailpit.rule: "Host(`{{ mailpit.hostname }}`)" + traefik.http.routers.mailpit.tls: "true" + traefik.http.services.mailpit.loadbalancer.server.port: 8025 diff --git a/roles/sram-ldap/defaults/main.yml b/roles/sram-ldap/defaults/main.yml new file mode 100644 index 000000000..e20f4553d --- /dev/null +++ b/roles/sram-ldap/defaults/main.yml @@ -0,0 +1,41 @@ +--- +ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" + +ldap_defaults: + image: "ghcr.io/surfscz/sram-ldap:main" + conf_dir: "{{ current_release_appdir }}/sram/ldap" + ldif_dir: "{{ ldap_defaults.conf_dir }}/schema" + certs_dir: "{{ ldap_defaults.conf_dir }}/certs" + backup_dir: "{{ ldap_defaults.conf_dir }}/ldap" + data_dir: "{{ ldap_defaults.conf_dir}}/data" + uri: "ldap://localhost/" + + user: "openldap" + group: "openldap" + + # admin_group: "ldap_admin" + admins: + - name: Admin + uid: admin + pw_hash: + sshkey: "" + + loglevel: "stats stats2 filter" + + services_password: secret + monitor_password: secret + ldap_monitor_password: secret + + uri: "ldap://localhost/" + rid_prefix: "ldap://" + + base_domain: "{{ base_domain }}" + base_dn: >- + {{ ((ldap_defaults.base_domain.split('.')|length)*['dc=']) | + zip(ldap_defaults.base_domain.split('.')) | list | map('join', '') | list | join(',') }} + services_dn: + basedn: "dc=services,{{ ldap_defaults.base_dn }}" + o: "Services" + binddn: "cn=admin,{{ ldap_defaults.base_dn }}" + + hosts: {} diff --git a/roles/sram-ldap/files/eduMember.ldif b/roles/sram-ldap/files/eduMember.ldif new file mode 100644 index 000000000..42894d596 --- /dev/null +++ b/roles/sram-ldap/files/eduMember.ldif @@ -0,0 +1,27 @@ +dn: cn=eduMember,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduMember +# Internet X.500 Schema for Ldappc +# Includes the eduMember ObjectClass schema +# +# +# An auxiliary object class, "eduMember," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attributes specified as belonging to the +# object class are "isMemberOf" and "hasMember." 
+# +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.1 + NAME 'isMemberOf' + DESC 'identifiers for groups to which containing entity belongs' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.2 + NAME 'hasMember' + DESC 'identifiers for entities that are members of the group' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.5.2.1 + NAME 'eduMember' + AUXILIARY + MAY ( isMemberOf $ hasMember ) + ) diff --git a/roles/sram-ldap/files/eduPerson.ldif b/roles/sram-ldap/files/eduPerson.ldif new file mode 100644 index 000000000..e4f2c96a0 --- /dev/null +++ b/roles/sram-ldap/files/eduPerson.ldif @@ -0,0 +1,83 @@ +dn: cn=eduperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduperson +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.1 + NAME 'eduPersonAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.7 + NAME 'eduPersonEntitlement' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.2 + NAME 'eduPersonNickName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.3 + NAME 'eduPersonOrgDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.4 + NAME 'eduPersonOrgUnitDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.5 + NAME 'eduPersonPrimaryAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.8 + NAME 'eduPersonPrimaryOrgUnitDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.6 + NAME 'eduPersonPrincipalName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.12 + NAME 'eduPersonPrincipalNamePrior' + DESC 'eduPersonPrincipalNamePrior per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.9 + NAME 'eduPersonScopedAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.10 + NAME 'eduPersonTargetedID' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.11 + NAME 'eduPersonAssurance' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.13 + NAME 'eduPersonUniqueId' + DESC 'eduPersonUniqueId per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.16 + NAME 'eduPersonOrcid' + DESC 'ORCID researcher identifiers belonging to the principal' + EQUALITY caseIgnoreMatch 
+ SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.1.2 + NAME 'eduPerson' + AUXILIARY + MAY ( + eduPersonAffiliation $ eduPersonNickname $ eduPersonOrgDN $ + eduPersonOrgUnitDN $ eduPersonPrimaryAffiliation $ + eduPersonPrincipalName $ eduPersonEntitlement $ eduPersonPrimaryOrgUnitDN $ + eduPersonScopedAffiliation $ eduPersonTargetedID $ eduPersonAssurance $ + eduPersonPrincipalNamePrior $ eduPersonUniqueId $ eduPersonOrcid ) + ) diff --git a/roles/sram-ldap/files/groupOfMembers.ldif b/roles/sram-ldap/files/groupOfMembers.ldif new file mode 100644 index 000000000..aa10094d3 --- /dev/null +++ b/roles/sram-ldap/files/groupOfMembers.ldif @@ -0,0 +1,19 @@ +# Internet X.500 Schema for Ldappc +# Includes the groupOfMembers ObjectClass schema +# +# Taken from RFC2307bis draft 2 +# https://tools.ietf.org/html/draft-howard-rfc2307bis-02 +# +# An structural object class, "groupOfMembers" is a convenient container +# for an extensible set of attributes concerning group memberships. +# +dn: cn=groupOfMembers,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: groupOfMembers +olcObjectClasses: ( 1.3.6.1.1.1.2.18 SUP top STRUCTURAL + NAME 'groupOfMembers' + DESC 'A group with members (DNs)' + MUST cn + MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ + description $ member ) + ) diff --git a/roles/sram-ldap/files/ldap-add b/roles/sram-ldap/files/ldap-add new file mode 100644 index 000000000..3d0c5e487 --- /dev/null +++ b/roles/sram-ldap/files/ldap-add @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# Copyright (C) 2015-2019 Maciej Delmanowski +# Copyright (C) 2015-2019 DebOps +# SPDX-License-Identifier: GPL-3.0-only + +# Check if specified LDAP schema file is loaded in the local slapd cn=config +# database. If not, try loading it in the server. + + +set -o nounset -o pipefail -o errexit + +schema_file="${1}" + +if [ -z "${schema_file}" ] ; then + printf "Error: You need to specify schema file to load\\n" && exit 1 +fi + +if [ ! -e "${schema_file}" ] ; then + printf "Error: %s does not exist\\n" "${schema_file}" && exit 1 +fi + +if [ ! 
-r "${schema_file}" ] ; then + printf "Error: %s is unreadable\\n" "${schema_file}" && exit 1 +fi + +# The schema file is already converted, we can deal with them directly +if [[ "${schema_file}" == *.ldif ]] ; then + + # Get the DN of the schema + schema_dn="$(grep -E '^^dn:\s' "${schema_file}")" + + # Get list of already installed schemas from local LDAP server + schema_list() { + ldapsearch -Y EXTERNAL -H ldapi:/// -LLLQ -b 'cn=schema,cn=config' dn \ + | sed -e '/^$/d' -e 's/{[0-9]\+}//' + } + + if schema_list | grep -q "${schema_dn}" ; then + + # Schema is already installed, do nothing + exit 80 + + else + + # Try installing the schema in the database + ldapadd -Y EXTERNAL -H ldapi:/// -f "${schema_file}" + + fi + +fi diff --git a/roles/sram-ldap/files/ldapPublicKey.ldif b/roles/sram-ldap/files/ldapPublicKey.ldif new file mode 100644 index 000000000..8968b6e96 --- /dev/null +++ b/roles/sram-ldap/files/ldapPublicKey.ldif @@ -0,0 +1,21 @@ +dn: cn=openssh-lpk-openldap,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: openssh-lpk-openldap +# +# LDAP Public Key Patch schema for use with openssh-ldappubkey +# useful with PKA-LDAP also +# +# Author: Eric AUGE +# +# Based on the proposal of : Mark Ruijter +# +# octetString SYNTAX +olcAttributeTypes: ( 1.3.6.1.4.1.24552.500.1.1.1.13 NAME 'sshPublicKey' + DESC 'MANDATORY: OpenSSH Public key' + EQUALITY octetStringMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 ) +# printableString SYNTAX yes|no +olcObjectClasses: ( 1.3.6.1.4.1.24552.500.1.1.2.0 NAME 'ldapPublicKey' SUP top AUXILIARY + DESC 'MANDATORY: OpenSSH LPK olcObjectClasses:' + MUST ( sshPublicKey $ uid ) + ) diff --git a/roles/sram-ldap/files/logrotate_slapd b/roles/sram-ldap/files/logrotate_slapd new file mode 100644 index 000000000..f225a935f --- /dev/null +++ b/roles/sram-ldap/files/logrotate_slapd @@ -0,0 +1,13 @@ +/var/log/slapd.log +{ + rotate 7 + daily + missingok + notifempty + delaycompress + compress + postrotate + invoke-rc.d rsyslog rotate > /dev/null + endscript +} + diff --git a/roles/sram-ldap/files/rsyslog_slapd.conf b/roles/sram-ldap/files/rsyslog_slapd.conf new file mode 100644 index 000000000..a3435617f --- /dev/null +++ b/roles/sram-ldap/files/rsyslog_slapd.conf @@ -0,0 +1,2 @@ +if $programname == 'slapd' then /var/log/slapd.log +if $programname == 'slapd' then ~ diff --git a/roles/sram-ldap/files/sczGroup.ldif b/roles/sram-ldap/files/sczGroup.ldif new file mode 100644 index 000000000..d1b5cb332 --- /dev/null +++ b/roles/sram-ldap/files/sczGroup.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sczGroup ObjectClass schema +# +# An auxiliary object class, "sczGroup," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attribute specified as belonging to the +# object class is "sczMember." +# +# It is specifically configured to support the memberOf overlay. 
+# +dn: cn=sczGroup,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sczGroup +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.40.50.1.1 + NAME 'sczMember' + DESC 'DN identifiers for entities that are members of the group' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.40.50.1 + NAME 'sczGroup' + AUXILIARY + MAY ( sczMember ) + ) diff --git a/roles/sram-ldap/files/sramPerson.ldif b/roles/sram-ldap/files/sramPerson.ldif new file mode 100644 index 000000000..e194381d1 --- /dev/null +++ b/roles/sram-ldap/files/sramPerson.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sramPerson ObjectClass schema +# +# An auxiliary object class, "sramPerson," is a convenient container +# for an extensible set of attributes concerning sram persons. +# At this time, the only attribute specified as belonging to the +# object class is "sramInactiveDays". +# +dn: cn=sramPerson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sramPerson +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.100.20.2.1 NAME 'sramInactiveDays' + DESC 'Number of days this entity was inactive' + EQUALITY IntegerMatch + ORDERING IntegerOrderingMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 + ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.100.20.1.1 NAME 'sramPerson' + AUXILIARY + MAY ( + sramInactiveDays + ) + ) diff --git a/roles/sram-ldap/files/voPerson.ldif b/roles/sram-ldap/files/voPerson.ldif new file mode 100644 index 000000000..bdce11ed8 --- /dev/null +++ b/roles/sram-ldap/files/voPerson.ldif @@ -0,0 +1,44 @@ +dn: cn=voperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: voperson +olcAttributeTypes: {0}( 1.3.6.1.4.1.34998.3.3.1.1 NAME 'voPersonApplicationUID + ' DESC 'voPerson Application-Specific User Identifier' EQUALITY caseIgnoreMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {1}( 1.3.6.1.4.1.34998.3.3.1.2 NAME 'voPersonAuthorName' DE + SC 'voPerson Author Name' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.1 + 15.121.1.15' ) +olcAttributeTypes: {2}( 1.3.6.1.4.1.34998.3.3.1.3 NAME 'voPersonCertificateDN' + DESC 'voPerson Certificate Distinguished Name' EQUALITY distinguishedNameMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {3}( 1.3.6.1.4.1.34998.3.3.1.4 NAME 'voPersonCertificateIss + uerDN' DESC 'voPerson Certificate Issuer DN' EQUALITY distinguishedNameMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {4}( 1.3.6.1.4.1.34998.3.3.1.5 NAME 'voPersonExternalID' DE + SC 'voPerson Scoped External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3 + .6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {5}( 1.3.6.1.4.1.34998.3.3.1.6 NAME 'voPersonID' DESC 'voPe + rson Unique Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115 + .121.1.15' ) +olcAttributeTypes: {6}( 1.3.6.1.4.1.34998.3.3.1.7 NAME 'voPersonPolicyAgreemen + t' DESC 'voPerson Policy Agreement Indicator' EQUALITY caseIgnoreMatch SYNTAX + '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {7}( 1.3.6.1.4.1.34998.3.3.1.8 NAME 'voPersonSoRID' DESC 'v + oPerson External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.146 + 6.115.121.1.15' ) +olcAttributeTypes: {8}( 1.3.6.1.4.1.34998.3.3.1.9 NAME 'voPersonStatus' DESC ' + voPerson Status' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1. 
15' ) +olcAttributeTypes: {9}( 1.3.6.1.4.1.34998.3.3.1.10 NAME 'voPersonAffiliation' + DESC 'voPerson Affiliation Within Local Scope' EQUALITY caseIgnoreMatch SYNTA + X '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {10}( 1.3.6.1.4.1.34998.3.3.1.11 NAME 'voPersonExternalAffi + liation' DESC 'voPerson Scoped External Affiliation' EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {11}( 1.3.6.1.4.1.34998.3.3.1.12 NAME 'voPersonScopedAffili + ation' DESC 'voPerson Affiliation With Explicit Local Scope' EQUALITY caseIgn + oreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcObjectClasses: {0}( 1.3.6.1.4.1.34998.3.3.1 NAME 'voPerson' AUXILIARY MAY ( + voPersonAffiliation $ voPersonApplicationUID $ voPersonAuthorName $ voPerson + CertificateDN $ voPersonCertificateIssuerDN $ voPersonExternalAffiliation $ v + oPersonExternalID $ voPersonID $ voPersonPolicyAgreement $ voPersonScopedAffi + liation $ voPersonSoRID $ voPersonStatus ) ) diff --git a/roles/sram-ldap/handlers/main.yml b/roles/sram-ldap/handlers/main.yml new file mode 100644 index 000000000..0510176a6 --- /dev/null +++ b/roles/sram-ldap/handlers/main.yml @@ -0,0 +1,24 @@ +--- +- name: restart rsyslog + service: + name: rsyslog + state: restarted + listen: "restart rsyslog" + +- name: systemd daemon-reload + systemd: + name: slapd + daemon_reload: yes + +- name: restart LDAP + systemd: + name: slapd + state: restarted + enabled: true + daemon_reload: true + +- name: Restart the ldap container + community.docker.docker_container: + name: "{{ containers.ldap }}" + restart: true + state: started diff --git a/roles/sram-ldap/tasks/admins.yml b/roles/sram-ldap/tasks/admins.yml new file mode 100644 index 000000000..e00115c04 --- /dev/null +++ b/roles/sram-ldap/tasks/admins.yml @@ -0,0 +1,82 @@ +--- +# - name: Initialize DIT admin +# community.general.ldap_entry: +# dn: "{{ services_ldap.binddn }}" +# objectClass: organizationalRole +# attributes: +# cn: "{{ services_ldap.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" + +# determine which users need to be admin +# check for each role of each user if it leads to membership of group {{ldap_admin_group}} +# - name: determine ldap admins +# set_fact: +# ldap_admins: "{{ ldap_admins | default([]) + [item.0] }}" +# when: ldap_admin_group in role_to_groups[item.1] or ldap_admin_group in item.0.groups +# loop: "{{ users | subelements('roles') }}" + +- name: determine ldap admins + set_fact: + ldap_admins: "{{ ldap.admins }}" + +# Find existing ldap admins +- name: Initialize admins (I) + community.general.ldap_search: + dn: "{{ ldap.services_dn.basedn }}" + scope: "onelevel" + filter: "(objectClass=organizationalRole)" + attrs: + - "cn" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + register: "existing_ldap_admins_result" + +# ansible sucks like this: we need to extract the results from the result +- name: Initialize admins (Ia) + set_fact: + existing_ldap_admins: "{{ existing_ldap_admins_result.results }}" + +- debug: + var: "existing_ldap_admins" +- debug: + var: "ldap_admins" + +# Remove LDAP non-admins +- name: Initialize admins (II) + community.general.ldap_entry: + dn: "cn={{ item.cn }},{{ ldap.services_dn.basedn }}" + state: absent + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + when: > + item.cn not in ldap_admins | map(attribute='uid') + and item.cn != 'admin' + loop: "{{existing_ldap_admins}}" + +# Insert LDAP 
admins +- name: Initialize admins (III) + community.general.ldap_entry: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + objectClass: + - simpleSecurityObject + - organizationalRole + attributes: + description: An LDAP administrator + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + +# Make sure passwords are updated for existing admins +- name: Initialize admins (IV) + community.general.ldap_attrs: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + attributes: + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + diff --git a/roles/sram-ldap/tasks/main.yml b/roles/sram-ldap/tasks/main.yml new file mode 100644 index 000000000..97977e06a --- /dev/null +++ b/roles/sram-ldap/tasks/main.yml @@ -0,0 +1,369 @@ +--- +# playbook to install and configure all components of the LDAP +- name: Install LDAP utils + apt: + state: "present" + name: + - "python3-ldap" # for ansible ldap modules + install_recommends: false + +- name: Ensure that a number of directories exist + file: + path: "{{ item.path }}" + state: "directory" + # owner: "{{ ldap.user }}" + # group: "{{ ldap.group }}" + mode: "{{ item.mode }}" + with_items: + - { path: "{{ldap.ldif_dir}}", mode: "0755" } + - { path: "{{ldap.certs_dir}}", mode: "0755" } + - { path: "{{ldap.data_dir}}", mode: "0777" } + notify: Restart the ldap container + +- name: Copy schemas + copy: + src: "{{ item }}" + dest: "{{ ldap.ldif_dir }}/{{ item }}" + mode: "0644" + with_items: + - sczGroup.ldif + - groupOfMembers.ldif + - eduPerson.ldif + - ldapPublicKey.ldif + - eduMember.ldif + - voPerson.ldif + - sramPerson.ldif + notify: Restart the ldap container + +- name: Copying ldap-add script + copy: + src: "{{ item }}" + dest: "{{ ldap.conf_dir }}/{{ item }}" + mode: "0755" + with_items: + - ldap-add + +# # cert is used for communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Copy wildcard frontend cert +# copy: +# src: "/etc/ssl/certs/sram-https.pem" # was installed here by update-ca-certificates +# remote_src: true +# dest: "{{ldap.certs_dir}}/frontend.crt" +# mode: "0644" +# when: "is_dev" +# notify: Restart the ldap container + +- name: Setup ldap hosts + vars: + host: + key: "%s.{{ ldap.base_domain }}" + value: "%s" + etc_hosts: {} + set_fact: + etc_hosts: >- + {{ etc_hosts | + combine({ host.key | format(item.key): host.value | format(item.value) }) }} + with_dict: "{{ ldap.hosts }}" + +- name: Create the ldap container + community.docker.docker_container: + name: "sram-ldap" + image: "{{ ldap.image }}" + restart_policy: "always" + state: started + pull: true + ports: + - 0.0.0.0:389:389 + env: + LDAP_ORGANISATION: "{{ env }}" + LDAP_DOMAIN: "{{ ldap.base_domain }}" + LDAP_ROOTPASS: "{{ ldap.services_password }}" + etc_hosts: "{{ etc_hosts }}" + volumes: + # For now the target side /opt/ldap is hard-coded + - "{{ ldap.conf_dir }}:/opt/ldap" + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.tcp.routers.ldap.entrypoints: "ldaps" + traefik.tcp.routers.ldap.rule: "HostSNI(`*`)" + traefik.tcp.routers.ldap.tls: "true" + traefik.tcp.services.ldap.loadbalancer.server.port: "389" + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ -S /var/run/slapd/ldapi ]]" + register: "ldap_container" + +- name: Wait for LDAP 
initialization + ansible.builtin.wait_for: + port: 389 + delay: 5 + +- name: Wait for 5 seconds + ansible.builtin.wait_for: + timeout: 5 + when: "ldap_container is changed" + +- name: Ensure the schemas are added to LDAP + ansible.builtin.shell: + # For now the target side /opt/ldap is hard-coded + cmd: "docker exec sram-ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" + register: "result" + failed_when: "result.rc not in [0,80]" + changed_when: "result.rc != 80" + become: true + loop: + - "sczGroup.ldif" + - "groupOfMembers.ldif" + - "eduPerson.ldif" + - "ldapPublicKey.ldif" + - "eduMember.ldif" + - "voPerson.ldif" + - "sramPerson.ldif" + +- name: Set indices + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcDbIndex: "{{item}}" + state: "present" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + with_items: + - "entryUUID eq" + - "o eq" + - "dc eq" + - "entryCSN eq" + +- name: Set olcDatabase={-1}frontend olcSizeLimit + community.general.ldap_attrs: + dn: "olcDatabase={-1}frontend,cn=config" + state: "exact" + attributes: + olcSizeLimit: "unlimited" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Set config + community.general.ldap_attrs: + dn: "cn=config" + state: "present" + attributes: + olcServerID: "{{ ldap.server_id }}" + olcSizeLimit: "unlimited" + olcLogLevel: "{{ ldap.loglevel }}" + olcAttributeOptions: "time-" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# # cert is used for communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Set TLS config +# community.general.ldap_attrs: +# dn: "cn=config" +# state: "exact" +# attributes: +# olcTLSCACertificateFile: "/opt/ldap/certs/frontend.crt" +# bind_dn: "cn=admin,cn=config" +# bind_pw: "{{ ldap.services_password }}" +# server_uri: "{{ ldap.uri }}" + +- name: Setup Modules + community.general.ldap_attrs: + dn: cn=module{0},cn=config + attributes: + olcModuleLoad: + - syncprov + - dynlist.so + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Dynlist + community.general.ldap_entry: + dn: olcOverlay=dynlist,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcDynamicList + attributes: + olcDlAttrSet: "voPerson labeledURI member+memberOf@groupOfMembers" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Syncprov + community.general.ldap_entry: + dn: olcOverlay=syncprov,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcSyncProvConfig + attributes: + olcSpCheckpoint: 100 10 + olcSpSessionLog: 100 + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# Leave this here in case we do want to build our own +# root database from scratch instead of relying on the +# domain based Debian slapd package setup +# +# - name: Setup main database +# community.general.ldap_attrs: +# dn: olcDatabase={1}mdb,cn=config +# attributes: +# olcSuffix: "{{ services_ldap.basedn }}" +# olcRootDN: "{{ services_ldap.binddn }}" +# olcRootPW: "{{ '%s' | format(services_ldap_password) | slapd_hash }}" +# state: exact +# +# - name: Set root credentials +# community.general.ldap_attrs: +# dn: olcDatabase={0}config,cn=config +# attributes: +# olcAccess: >- +# {0}to * +# 
by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage # {% if environment_name=="vm" %} # by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage # {% endif %} # by dn.exact="{{ services_ldap.binddn }}" manage # by * break # state: exact # bind_dn: "cn=admin,cn=config" # bind_pw: "{{ services_ldap_password }}" # server_uri: "{{ ldap_uri }}" # # # - name: Get uid of openldap user # ansible.builtin.getent: # database: "passwd" # key: "openldap" # # # store it in a nice var (so line below doesn't get too long) # - name: Save gid of openldap user # ansible.builtin.set_fact: # openldap_auth: "gidNumber={{ansible_facts.getent_passwd['openldap'][2]}}+\ # uidNumber={{ansible_facts.getent_passwd['openldap'][1]}}" # + +- name: Set ACLs + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcAccess: + - >- + to dn.regex="(([^,]+),{{ ldap.services_dn.basedn }})$" + by dn.exact="{{ ldap.services_dn.binddn }}" write + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write + by dn.exact,expand="cn=admin,$1" read + by * break + - >- + to * + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage + by dn.regex="cn=[^,]+,{{ ldap.services_dn.basedn }}" read + {% if env=="vm" %} + by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage + {% endif %} + by * break + - >- + to attrs=userPassword + by self write + by anonymous auth + by * break + state: exact + ordered: true + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# ldap_rids: +# 101: ldaps://ldap1.scz-vm.net/ +# 102: ldaps://ldap2.scz-vm.net/ + +- name: Set rids + vars: + start: 101 + rid: + key: "%d" + value: "{{ ldap.rid_prefix }}%s.{{ ldap.base_domain }}/" + ldap_rids: {} + set_fact: + ldap_rids: >- + {{ ldap_rids | combine({ rid.key | format(start|int): + rid.value | format(item.key) }) }} + start: "{{ start|int + 1 }}" + with_dict: "{{ ldap.hosts | dict2items | sort(attribute='key') }}" + +# For future Claude users: the construct below ends up yielding a string +# representation of the dict, which can no longer be used in the +# subsequent 'Setup rids' task... +# - name: Set rids +# set_fact: +# ldap_rids: >- +# {%- set result = {} %} +# {%- for host in (ldap_hosts | dict2items | sort(attribute='key')) %} +# {%- set _ = result.update({(101 + loop.index0)|string: \ +# ldap_rid_prefix ~ host.key ~ '.' 
~ base_domain ~ '/'}) %} +# {%- endfor %} +# {{ result }} + +- name: Setup rids + vars: + rid: >- + rid={} + provider="{}" + searchbase="{{ ldap.services_dn.basedn }}" + type=refreshAndPersist + bindmethod=simple + binddn="{{ ldap.services_dn.binddn }}" + credentials={{ ldap.services_password }} + retry="30 +" + timeout=30 + network-timeout=5 + rids: [] + set_fact: + rids: "{{ rids + [ rid.format(item.key, item.value) ] }}" + with_dict: "{{ dict(ldap_rids) }}" + +- name: Setup Syncrepl + community.general.ldap_attrs: + dn: olcDatabase={1}mdb,cn=config + attributes: + olcSyncrepl: "{{ rids }}" + olcMultiProvider: "TRUE" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# We now have Syncrepl in place, so only write to primary +- name: Initialize DIT + community.general.ldap_entry: + dn: "{{ ldap.services_dn.basedn }}" + state: "present" + objectClass: + - "top" + - "dcObject" + - "organization" + attributes: + dc: "{{ ldap.services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" + o: "{{ ldap.services_dn.o }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + when: > + inventory_hostname in groups['ldap_primary'] + +# We now have Syncrepl in place, so only write to primary +- name: Add ldap admins + include_tasks: "admins.yml" + when: > + inventory_hostname in groups['ldap_primary'] diff --git a/roles/sram-ldap/templates/ldap-backup.sh.j2 b/roles/sram-ldap/templates/ldap-backup.sh.j2 new file mode 100644 index 000000000..2c6aa9201 --- /dev/null +++ b/roles/sram-ldap/templates/ldap-backup.sh.j2 @@ -0,0 +1,19 @@ +#!/bin/bash +# vim:ft=sh +set -e + +if [ $UID -ne 0 ] +then + echo "Sorry, this script must run as root" + exit 1 +fi + +BACKUP_DIR="{{ldap_backup_dir}}" +BACKUP_FILE="$BACKUP_DIR/ldap_$(/bin/date +%Y-%m-%d_%H:%M)" + +mkdir -p -m 0755 "$BACKUP_DIR" + +/usr/sbin/slapcat -o ldif-wrap=no -n0 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db0.ldif.bz2" +/usr/sbin/slapcat -o ldif-wrap=no -n1 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db1.ldif.bz2" + +exit 0 diff --git a/roles/sram-ldap/templates/ldap.conf.j2 b/roles/sram-ldap/templates/ldap.conf.j2 new file mode 100644 index 000000000..d7fa7c227 --- /dev/null +++ b/roles/sram-ldap/templates/ldap.conf.j2 @@ -0,0 +1,16 @@ +# +# LDAP Defaults +# + +# See ldap.conf(5) for details +# This file should be world readable but not world writable. + +#BASE dc=example,dc=com +#URI ldap://ldap.example.com ldap://ldap-master.example.com:666 + +#SIZELIMIT 12 +#TIMELIMIT 15 +#DEREF never + +# TLS certificates (needed for GnuTLS) +TLS_CACERT {{ ssl_certs_dir }}/{{ internal_base_domain }}.crt diff --git a/roles/sram-ldap/templates/slapd.service.j2 b/roles/sram-ldap/templates/slapd.service.j2 new file mode 100644 index 000000000..7e0f79397 --- /dev/null +++ b/roles/sram-ldap/templates/slapd.service.j2 @@ -0,0 +1,20 @@ +[Unit] +Description = LDAP server + +[Service] +Type = forking +User = root +SupplementaryGroups = ssl-cert +ExecStartPre=-/bin/mkdir -p /var/run/slapd +ExecStartPre=-/bin/chown openldap. 
/var/run/slapd +ExecStart = /usr/sbin/slapd -F /etc/ldap/slapd.d -u openldap -g openldap -h 'ldapi:/// ldap://localhost/ ldaps://{{inventory_hostname}}/' +Restart = always +RestartSec = 30 +PIDFile = /run/slapd/slapd.pid +# defaults are 1024:524288 which is too small for slapd +# see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=378261 and +# https://www.openldap.org/lists/openldap-software/200802/msg00186.html +LimitNOFILE=4096:524288 + +[Install] +WantedBy = multi-user.target diff --git a/roles/sram-ldap/vars/main.yml b/roles/sram-ldap/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sram-ldap/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sram-metadata/defaults/main.yml b/roles/sram-metadata/defaults/main.yml new file mode 100644 index 000000000..5355989bc --- /dev/null +++ b/roles/sram-metadata/defaults/main.yml @@ -0,0 +1,81 @@ +--- +metadata: "{{ metadata_defaults | combine(metadata_overrides, recursive=true) }}" + +metadata_defaults: + image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" + image_pyff: "ghcr.io/surfscz/sram-pyff:main" + hostname: "meta.{{ base_domain }}" + basedir: "{{current_release_appdir}}/sram/metadata" + + # server_name: "metadata-server" + + user: "sram-metadata" + group: "sram-metadata" + +# idps_source: "https://metadata.surfconext.nl/idps-metadata.xml" +# idps_cert: | +# -----BEGIN CERTIFICATE----- +# MIIEKjCCAhICEG12w6QqayYAWntxDN59dU0wDQYJKoZIhvcNAQELBQAwPDELMAkG +# A1UEBhMCTkwxEDAOBgNVBAoMB1NVUkZuZXQxGzAZBgNVBAMMElNVUkZjb25leHQg +# Um9vdCBDQTAeFw0xOTAxMTQxNjM5MDVaFw0yNDAxMTgxNjM5MDVaMGsxCzAJBgNV +# BAYTAk5MMRAwDgYDVQQIDAdVdHJlY2h0MRAwDgYDVQQKDAdTVVJGbmV0MRMwEQYD +# VQQLDApTVVJGY29uZXh0MSMwIQYDVQQDDBpTVVJGY29uZXh0IG1ldGFkYXRhIHNp +# Z25lcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMckFyqXzW7dbMt4 +# wDdSLaAjFAbNziUgQaivu4dl9Uf/cZ4f36a9DfQBUSraNoIR76ruwK3TPfFalemp +# xmWTsoVSQpb3AOsWbU+i0YKS1cmcqMUC1fef2j1IbuK4B4nEu9S5saGNVGNvUJ+Y +# jDUpC5vyyp7boW9E1md2jIBI6Mw+ZhlmkPucqaphxurWnm0KbxTZrYLOBZ1IXj6r +# yrRoFwwtjEH+CW8cRn8OATK0q4yb0BVr2gY2tp/lTpASHZ3WVWBK0prwK0KkusY6 +# ck+/vvlk46IdEr803NB0Dm3ECh3i65mfCaWzVTtd/md874paK+65f1JeVyd5I5al +# M2KEpvkCAwEAATANBgkqhkiG9w0BAQsFAAOCAgEAjvJXXkxOqh3K0k2NdDG5EOTy +# bA+koRbAqhdY/qJoSnqTzwBxJc6aPs+L4q2PIoLo0gNJj1Nm1taLusaaK+CBx3ar +# 1kxEika5FM0dqFjD3i7Y5U0FMeDB5cReo8TNdo31VGoY7CbRjtqHLRTuKzNmIfEm +# ahLnHIBtarE82b7Mpg0aLxjrRR+t8wSCriy+e9AEPzC5bWxtPJA+OhU8U9hMuOs5 +# SzKmHwYue4WY3q1rRaDpK3fqgXRDRfznNn9/RDDbBos7CRMSAPEmAO28qLKBW/1z +# a2TKQLddZ3uoCurFNbToSTueKYVEnveQNO2P5X6uy4rcYkjeSiwbmHo7jYuHAxx4 +# uGzHMpoqoGNx+2iYjtUo3dJUXzcZai3X+RuuMKXXvqGzrxJsoKayNVAE1dWoUHJl +# RouPhDLTdZq/pblORhFS8r10rKhSScgrNuN9LTTV7EPFeVr8trocNwl8IruH+eNL +# 6/7b5Y7fb7rvpxeHjWrTz8a9BXAIAv+bgyrg4OHGRcNIQb0XF438HD9r8Zb92B6Z +# VCR3aVS5496+1td+8aN/Blzo59LhKPiHyGZCPHFV/oBqG7nxp603kcWmJOcG+AgB +# 9bFiAimF5LLk/LnMfplK9w0vvxWVcdQkDgVPYvEGNtttj0QC7/jM4ZeihGb6Oyzy +# DZA6aeg73/ygOATQ13A= +# -----END CERTIFICATE----- + idps_filters: [] + + idps_files: + - name: "dummy-idp" + metadata: | + + + + + + + SRAM VM Dummy IdP + SRAM VM Dummy IdP + https://test-idp.sram.example.org/ + + + Administrator + mailto:sinterklaas@example.nl + + + +# idps_xrd: "{{metadata_defaults.basedir}}/certs/surfconext.xrd" +# idps_source_dir: "/opt/metadata-src" +# idps_feed: "{{ metadata_defaults.basedir }}/idps_feed.fd" +# idps_file: "idps.xml" +## +# proxy_frontend_source: "https://satosa.local/frontend.xml" +# proxy_frontend_feed: "{{ metadata_defaults.basedir 
}}/frontend_feed.fd" +# proxy_frontend_file: "proxy_idp.xml" +# +# proxy_backend_source: "https://satosa.local/metadata/backend.xml" +# proxy_backend_feed: "{{ metadata_defaults.basedir }}/backend_feed.fd" +# proxy_backend_file: "proxy_sp.xml" diff --git a/roles/sram-metadata/files/01_idps.fd b/roles/sram-metadata/files/01_idps.fd new file mode 100644 index 000000000..46d58b663 --- /dev/null +++ b/roles/sram-metadata/files/01_idps.fd @@ -0,0 +1,23 @@ +- load fail_on_error True: + #- "https://metadata.test.surfconext.nl/idps-metadata.xml verify certs/surfconext.crt" + - "https://metadata.test.surfconext.nl/idps-metadata.xml" + - "src/" +- select: + - "https://idp.diy.surfconext.nl/saml2/idp/metadata.php" + - "http://mock-idp" + - "https://login.test.eduid.nl" + - "https://idp-acc.surfnet.nl" + - "https://login.uaccess-a.leidenuniv.nl/nidp/saml2/metadata" + - "test-idp.lab.surf.nl" + - "https://test-idp.sram.surf.nl/saml/saml2/idp/metadata.php" + - "https://idp.ci-runner.sram.surf.nl/saml/saml2/idp/metadata.php" +- xslt: + stylesheet: "xslt/transform.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/idps.xml.new" +- stats diff --git a/roles/sram-metadata/files/02_backend.fd b/roles/sram-metadata/files/02_backend.fd new file mode 100644 index 000000000..698d615a4 --- /dev/null +++ b/roles/sram-metadata/files/02_backend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/backend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_sp.xml.new" +- stats diff --git a/roles/sram-metadata/files/03_frontend.fd b/roles/sram-metadata/files/03_frontend.fd new file mode 100644 index 000000000..252206d42 --- /dev/null +++ b/roles/sram-metadata/files/03_frontend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/frontend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_idp.xml.new" +- stats diff --git a/roles/sram-metadata/files/surf.png b/roles/sram-metadata/files/surf.png new file mode 100644 index 000000000..e2bc4a3c2 Binary files /dev/null and b/roles/sram-metadata/files/surf.png differ diff --git a/roles/sram-metadata/files/surf.svg b/roles/sram-metadata/files/surf.svg new file mode 100644 index 000000000..ffd7822d3 --- /dev/null +++ b/roles/sram-metadata/files/surf.svg @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surf_bimi.svg b/roles/sram-metadata/files/surf_bimi.svg new file mode 100644 index 000000000..f49b7a035 --- /dev/null +++ b/roles/sram-metadata/files/surf_bimi.svg @@ -0,0 +1,15 @@ + + + surf + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surfconext.crt b/roles/sram-metadata/files/surfconext.crt new file mode 100644 index 000000000..0e8f074e8 --- /dev/null +++ b/roles/sram-metadata/files/surfconext.crt @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- 
+MIIFbjCCA1agAwIBAgIQagXJvtKqIRRO8zD41OktRjANBgkqhkiG9w0BAQsFADB8MQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MSAwHgYDVQQDDBdTVVJGY29uZXh0IENBIDIwMjMgVEVTVDAeFw0yMzA2MDcxMTQxNDRaFw0yNTA2MDYxMTQxNDRaMIGVMQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MTkwNwYDVQQDDDBTVVJGY29uZXh0IHRlc3QgZW52aXJvbm1lbnQgbWV0YWRhdGEgc2lnbmVyIDIwMjMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC1Wj1MYwzY646Wa9td4zUZb5W27+cbARhNbIZsteUIPV6unxoO6tHCLJhRxC4pBTQsdrhfhh3+s5rwm8mjhJs2rciQkCdPiTl860jqihhWi5bFXyGX5o1U5mZgomUT+o7+nUj0et1l/kbFJ0GqIKtf0uPj7R/zqTpqeT0c6VFxchU6LA8GOI9w5XIISEGi/IWlDKyM69I3DmbCip/rm8u6kIQ0qqXh58lNNOsZw8WYokCXP0IPFQWpPkKC1VGYtivwKLzzvNxSGcuvp39ui+37hrdjqiTxK68Z48vJ6l+KsJP+jpDXYBYE/NsSVYez3vbVTB/l664yvBfKyGIHHDdTq5akLCQDgYQzjeNOU1oSZbcsub0k+osp7MFGkslYRhLb0V9tX0Xu+7jXzGthPUWicN0XdlHS0JOlSgOBftPn8kcqYNMF0IZVe6V/AVgfj4/4iDk3OKl9FRctFp3kSa8GzLIbjqmYXpGGIEse6U2gfqHS9WHu4odfKH7rhD3hZssCAwEAAaNSMFAwHQYDVR0OBBYEFNclSgPTrGp4QJQZGjFu6VEBTX4PMB8GA1UdIwQYMBaAFI5kmzwW92s2rRY2B5NNjSYI2oj1MA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQsFAAOCAgEAORNL7FGBkeq6u/rmcNf+jZZz27vw86COPOiN6ygTyxaBq5fmJ4JZlDnlfO4C/4iek2QjKdgPlpvATGUUMXJdO6a7A3/vXNuoIGu3Ug9GW4vpTVPulaYZedPHC8zBsxwRKwxpSTda7ubWDxH3vUxHz/zDOD2O71O6KFj6Ph8JXwa3TLH0xRN5CXa0UMKX0S+ck8MahCYnMtd99EBL/uOr0+D4q2HwxDRDpL4I9yRwyWxCafoR+6OfzO/vc/SGcjEk/9s0DrMKDkDTJlE9eZbaaWFFCkAkg3LHHLMYjykcTvjDEV75OohYcEC5/6uKHcB/ZQjHwkPBqv9pUF897yZ7sxS66GEJmqqVIC+ayWRvC8N+UmvMGWAdohrY7r7CPeTE+iVHaeB7xGTSI9BhTEv3yMNHhqzqIOvgr8h5iCv7B5hQL+V7MRqD7e7X9uRR7wbyGmwT4p4VFbz5VqthCOFobsMxam9Axt+saebRyH6Mg3Ro9D5WgGoZmTP1yyiMrmEHQdf9+iblbfTbRW0irlaX5t58fWB1u4QZqcamlhVcl65Fub0g+QkSyGDMD9G57z3CKOluNy6TxFZOxMynY6CEtaozDaiETm7NaNC1lkhi+SOHKRX5+q0KqJdnEC7GOX69hSDsCT905dpVnr8JgFKoUfXWSmbwTMj45190dw7RMzk= +-----END CERTIFICATE----- diff --git a/roles/sram-metadata/files/transform.xslt b/roles/sram-metadata/files/transform.xslt new file mode 100644 index 000000000..80673b688 --- /dev/null +++ b/roles/sram-metadata/files/transform.xslt @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/transform_proxy.xslt b/roles/sram-metadata/files/transform_proxy.xslt new file mode 100644 index 000000000..6e6f55d09 --- /dev/null +++ b/roles/sram-metadata/files/transform_proxy.xslt @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + Security Response Team + mailto:securityincident@surf.nl + + + + + + + + + + + https://refeds.org/sirtfi2 + https://refeds.org/sirtfi + + + + + + diff --git a/roles/sram-metadata/handlers/main.yml b/roles/sram-metadata/handlers/main.yml new file mode 100644 index 000000000..2787eba12 --- /dev/null +++ b/roles/sram-metadata/handlers/main.yml @@ -0,0 +1,19 @@ +--- +# - name: "enable pyff-metadata job" +# systemd: +# name: "pyff-metadata.timer" +# enabled: true +# state: "started" +# daemon_reload: true + +# - name: "run pyff-metadata job" +# systemd: +# name: "pyff-metadata.service" +# state: "started" +# daemon_reload: true + +- name: Restart the pyFF container + community.docker.docker_container: + name: "sram-metadata-pyff" + restart: true + state: started diff --git a/roles/sram-metadata/tasks/http.yml b/roles/sram-metadata/tasks/http.yml new file mode 100644 index 000000000..befcc1d18 --- /dev/null +++ b/roles/sram-metadata/tasks/http.yml @@ -0,0 +1,48 @@ +--- +- name: "Install index page" + template: + src: "index.html.j2" + dest: 
"{{metadata.basedir}}/web/index.html" + mode: "0644" + +- name: "Install legacy link" + file: + src: "." + dest: "{{metadata.basedir}}/web/metadata" + state: "link" + +- name: "Install logos" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/web" + mode: "0644" + with_items: + - "surf.svg" + - "surf.png" + - "surf_bimi.svg" + +- name: "Create the metadata-server container" + community.docker.docker_container: + name: "sram-metadata-server" + image: "{{ metadata.image_server }}" + restart_policy: "always" + state: "started" + pull: true + mounts: + - source: "{{metadata.basedir}}/web" + target: "/var/www/html" + type: "bind" + read_only: true + networks: + - name: "loadbalancer" + labels: + traefik.http.routers.metadata.rule: "Host(`{{ metadata.hostname }}`)" + traefik.http.routers.metadata.tls: "true" + traefik.enable: "true" + healthcheck: + test: [ "CMD", "curl", "-fail", "http://localhost/" ] + interval: "10s" + timeout: "5s" + retries: 3 + start_period: "5s" + diff --git a/roles/sram-metadata/tasks/main.yml b/roles/sram-metadata/tasks/main.yml new file mode 100644 index 000000000..d6ac55f29 --- /dev/null +++ b/roles/sram-metadata/tasks/main.yml @@ -0,0 +1,49 @@ +--- +- name: "Create metadata group" + group: + name: "{{ metadata.group }}" + state: "present" + register: "result" + +- name: "Save metadata group gid" + set_fact: + metadata_group_gid: "{{ result.gid }}" + +- name: "Create metadata user" + user: + name: "{{ metadata.user }}" + group: "{{ metadata.group }}" + comment: "User to run metadata service" + shell: "/bin/false" + password: "!" + home: "{{ metadata.basedir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save metadata user uid" + set_fact: + metadata_user_uid: "{{ result.uid }}" + + +- name: "Create metadata directories" + file: + path: "{{ item.dir }}" + state: "directory" + mode: "{{ item.mode }}" + owner: "root" + group: "{{ metadata.group }}" + with_items: + - { dir: "{{metadata.basedir}}/web", mode: "0775" } + - { dir: "{{metadata.basedir}}/feeds", mode: "0755" } + - { dir: "{{metadata.basedir}}/src", mode: "0755" } + - { dir: "{{metadata.basedir}}/certs", mode: "0755" } + - { dir: "{{metadata.basedir}}/xslt", mode: "0755" } + notify: "Restart the pyFF container" + + +- name: "Start pyff container" + include_tasks: "pyff.yml" + +- name: "Start http container" + include_tasks: "http.yml" diff --git a/roles/sram-metadata/tasks/pyff.yml b/roles/sram-metadata/tasks/pyff.yml new file mode 100644 index 000000000..6c66a5696 --- /dev/null +++ b/roles/sram-metadata/tasks/pyff.yml @@ -0,0 +1,106 @@ +--- +- name: "create self-signed Metadata Signing SSL certs" + shell: + cmd: ' + openssl genrsa -out "{{ metadata.basedir }}/certs/signing.key" 2048; + openssl req -new -nodes -x509 -subj "/C=NL/CN=signing" + -days 3650 -key "{{ metadata.basedir }}/certs/signing.key" + -out "{{ metadata.basedir }}/certs/signing.crt" -extensions v3_ca; + chown {{metadata.user}}:{{metadata.group}} {{ metadata.basedir }}/certs/*; + ' + creates: "{{ metadata.basedir }}/certs/signing.crt" + when: "metadata.signing_cert is not defined" + notify: "Restart the pyFF container" + +- name: "Write fixed Metadata signing certificates" + copy: + dest: "{{ metadata.basedir }}/certs/{{ item.file }}" + content: "{{item.contents}}" + mode: "{{item.mode}}" + owner: "{{metadata.user}}" + group: "{{metadata.group}}" + with_items: + - { file: "signing.key", mode: "0640", contents: "{{metadata.signing_cert.priv}}" } + - { file: "signing.crt", mode: "0644", contents: 
"{{metadata.signing_cert.pub}}" } + when: "metadata.signing_cert is defined" + notify: "Restart the pyFF container" + +- name: "Copy source certificates" + copy: + src: "{{ item }}" + dest: "{{ metadata.basedir }}/certs" + mode: "0644" + with_items: + - "surfconext.crt" + notify: "Restart the pyFF container" + +- name: "Install IdP metadata" + copy: + content: "{{item.metadata}}" + dest: "{{ metadata.basedir }}/src/{{item.name}}.xml" + mode: "0644" + with_items: "{{ metadata.idps_files }}" + notify: "Restart the pyFF container" + +- name: "Copy pyFF xslt transformations" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/xslt" + mode: "0644" + with_items: + - "transform_proxy.xslt" + - "transform.xslt" + notify: "Restart the pyFF container" + +- name: "Copy pyFF feeds" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/feeds" + mode: "0644" + with_items: + - "01_idps.fd" + - "02_backend.fd" + - "03_frontend.fd" + notify: "Restart the pyFF container" + +- name: "Create the pyFF container" + community.docker.docker_container: + name: "sram-metadata-pyff" + image: "{{ metadata.image_pyff }}" + restart_policy: "always" + state: "started" + pull: true + init: true + env: + USER: "{{ metadata_user_uid | string }}" + GROUP: "{{ metadata_group_gid | string }}" + mounts: + - source: "{{ metadata.basedir }}/web" + target: "/opt/pyff/web" + type: "bind" + - source: "{{ metadata.basedir }}/feeds" + target: "/opt/pyff/feeds" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/src" + target: "/opt/pyff/src" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/certs" + target: "/opt/pyff/certs" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/xslt" + target: "/opt/pyff/xslt" + type: "bind" + read_only: true + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ $(($(date +%s)-$(date -r /opt/pyff/web/idps.xml +%s))) -lt 400 ]]" + interval: "10s" + timeout: "5s" + retries: 3 + start_period: "5s" diff --git a/roles/sram-metadata/templates/index.html.j2 b/roles/sram-metadata/templates/index.html.j2 new file mode 100644 index 000000000..f0e40b22b --- /dev/null +++ b/roles/sram-metadata/templates/index.html.j2 @@ -0,0 +1,11 @@ + +SRAM + + +

+SRAM metadata
+
+SRAM IdP proxy metadata
+(for use by Service Providers)
+
+SRAM SP proxy metadata
+(for use by Identity Providers)

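+<!-- Note: the metadata documents advertised on this page (e.g. idps.xml) are
+     generated into this web root by the sram-metadata-pyff container -->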
+ + diff --git a/roles/sram-metadata/templates/pyff-metadata.service.j2 b/roles/sram-metadata/templates/pyff-metadata.service.j2 new file mode 100644 index 000000000..3df9cc6e8 --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.service.j2 @@ -0,0 +1,12 @@ +[Unit] +Description=pyFF Metadata processing +After=syslog.target network.target + +[Service] +Type=oneshot +WorkingDirectory={{metadata.basedir}} +ExecStart=echo "pyff-metadata" +SyslogIdentifier=pyff-metadata + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/pyff-metadata.timer.j2 b/roles/sram-metadata/templates/pyff-metadata.timer.j2 new file mode 100644 index 000000000..b1231af1f --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.timer.j2 @@ -0,0 +1,8 @@ +[Unit] +Description=Create Metadata timer + +[Timer] +OnCalendar=*:00 + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/vhosts.conf.j2 b/roles/sram-metadata/templates/vhosts.conf.j2 new file mode 100644 index 000000000..be3733827 --- /dev/null +++ b/roles/sram-metadata/templates/vhosts.conf.j2 @@ -0,0 +1,15 @@ + + ServerName sram-metadata-server + DocumentRoot /var/www/html + Header always set Referrer-Policy "strict-origin-when-cross-origin" + Header always set X-Content-Type-Options "nosniff" + Header always set X-XSS-Protection "1; mode=block" + + Require all granted + + + Require all granted + Options FollowSymLinks + Options -MultiViews + + diff --git a/roles/sram-metadata/vars/main.yml b/roles/sram-metadata/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sram-metadata/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sram-midproxy/defaults/main.yml b/roles/sram-midproxy/defaults/main.yml new file mode 100644 index 000000000..3522fcb47 --- /dev/null +++ b/roles/sram-midproxy/defaults/main.yml @@ -0,0 +1,8 @@ +--- +midproxy: + satosa_version: 8 + state_encryption_key: 'secret' + issuer: 'issuer' + client_id: 'client' + client_secret: 'secret' + sp_metadata: 'eb-metadata.xml' diff --git a/roles/sram-midproxy/files/internal_attributes.yaml b/roles/sram-midproxy/files/internal_attributes.yaml new file mode 100644 index 000000000..eb3dcd66e --- /dev/null +++ b/roles/sram-midproxy/files/internal_attributes.yaml @@ -0,0 +1,22 @@ +attributes: + displayname: + openid: [name] + saml: [displayName] + givenname: + openid: [given_name] + saml: [givenName] + mail: + openid: [email] + saml: [mail] + name: + openid: [name] + saml: [cn] + surname: + openid: [family_name] + saml: [sn, surname] + uid: + openid: [sub] + saml: [uid] + schachomeorganization: + openid: [schac_home_organization] + saml: [schacHomeOrganization] diff --git a/roles/sram-midproxy/files/plugins/attribute-maps/basic.py b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py new file mode 100644 index 000000000..f98466df5 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py @@ -0,0 +1,51 @@ +DEF = "urn:mace:dir:attribute-def:" +TERENA = "urn:mace:terena.org:attribute-def:" + +MAP = { + "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic", + "fro": { + f"{TERENA}schacHomeOrganization": "schacHomeOrganization", + f"{DEF}cn": "cn", + f"{DEF}displayName": "displayName", + f"{DEF}eduPersonAffiliation": "eduPersonAffiliation", + f"{DEF}eduPersonEntitlement": "eduPersonEntitlement", + f"{DEF}eduPersonPrincipalName": "eduPersonPrincipalName", + f"{DEF}eduPersonScopedAffiliation": "eduPersonScopedAffiliation", + 
f"{DEF}eduPersonTargetedID": "eduPersonTargetedID", + f"{DEF}eduPersonAssurance": "eduPersonAssurance", + f"{DEF}email": "email", + f"{DEF}emailAddress": "emailAddress", + f"{DEF}givenName": "givenName", + f"{DEF}gn": "gn", + f"{DEF}isMemberOf": "isMemberOf", + f"{DEF}mail": "mail", + f"{DEF}member": "member", + f"{DEF}name": "name", + f"{DEF}sn": "sn", + f"{DEF}surname": "surname", + f"{DEF}uid": "uid", + }, + "to": { + "schacHomeOrganization": f"{TERENA}schacHomeOrganization", + "cn": f"{DEF}cn", + "displayName": f"{DEF}displayName", + "eduPersonAffiliation": f"{DEF}eduPersonAffiliation", + "eduPersonEntitlement": f"{DEF}eduPersonEntitlement", + "eduPersonPrincipalName": f"{DEF}eduPersonPrincipalName", + "eduPersonScopedAffiliation": f"{DEF}eduPersonScopedAffiliation", + "eduPersonTargetedID": f"{DEF}eduPersonTargetedID", + "eduPersonAssurance": f"{DEF}eduPersonAssurance", + "eduPersonOrcid": f"{DEF}eduPersonOrcid", + "email": f"{DEF}email", + "emailAddress": f"{DEF}emailAddress", + "givenName": f"{DEF}givenName", + "gn": f"{DEF}gn", + "isMemberOf": f"{DEF}isMemberOf", + "mail": f"{DEF}mail", + "member": f"{DEF}member", + "name": f"{DEF}name", + "sn": f"{DEF}sn", + "surname": f"{DEF}surname", + "uid": f"{DEF}uid", + }, +} diff --git a/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml new file mode 100644 index 000000000..cb78fcccd --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml @@ -0,0 +1,14 @@ +module: satosa.backends.openid_connect.OpenIDConnectBackend +name: myaccessid +config: + provider_metadata: + issuer: !ENV SATOSA_ISSUER + client: + verify_ssl: yes + auth_req_params: + response_type: code + scope: [openid, profile, email, schac_home_organization] + client_metadata: + client_id: !ENV SATOSA_CLIENT_ID + client_secret: !ENV SATOSA_CLIENT_SECRET + redirect_uris: [/] diff --git a/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml new file mode 100644 index 000000000..ed97d539c --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml @@ -0,0 +1 @@ +--- diff --git a/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml new file mode 100644 index 000000000..c09b218b6 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml @@ -0,0 +1,3 @@ +module: satosa.frontends.ping.PingFrontend +name: ping +config: null diff --git a/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml new file mode 100644 index 000000000..1f8029b66 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml @@ -0,0 +1,63 @@ +module: satosa.frontends.saml2.SAMLFrontend +name: idp +config: + #acr_mapping: + # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" + + endpoints: + single_sign_on_service: + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + + # If configured and not false or empty the common domain cookie _saml_idp will be set + # with or have appended the IdP used for authentication. The default is not to set the + # cookie. 
If the value is a dictionary with key 'domain' then the domain for the cookie + # will be set to the value for the 'domain' key. If no 'domain' is set then the domain + # from the BASE defined for the proxy will be used. + #common_domain_cookie: + # domain: .example.com + + entityid_endpoint: true + enable_metadata_reload: no + + idp_config: + organization: {display_name: SURF, name: SURF, url: 'https://www.surf.nl/'} + contact_person: + - {contact_type: technical, email_address: 'mailto:sram-beheer@surf.nl', given_name: Technical} + - {contact_type: support, email_address: 'mailto:sram-beheer@surf.nl', given_name: Support} + - {contact_type: other, email_address: 'mailto:sram-beheer@surf.nl', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} + key_file: frontend.key + cert_file: frontend.crt + metadata: + # remote: + # - url: https://engine.test2.surfconext.nl/authentication/sp/metadata + # cert: null + local: [!ENV SATOSA_SP_METADATA] + entityid: //proxy.xml + accepted_time_diff: 60 + attribute_map_dir: plugins/attribute-maps + service: + idp: + endpoints: + single_sign_on_service: [] + name: Proxy IdP + ui_info: + display_name: + - lang: en + text: "MyAccessID proxy" + description: + - lang: en + text: "MyAccessID proxy" + keywords: + - lang: en + text: ["MyAccessID", "proxy"] + name_id_format: ['urn:oasis:names:tc:SAML:2.0:nameid-format:persistent', 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient'] + policy: + default: + fail_on_missing_requested: false + name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:basic + attribute_restrictions: null + lifetime: {minutes: 15} + encrypt_assertion: false + encrypted_advice_attributes: false diff --git a/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml new file mode 100644 index 000000000..86ac4e1f1 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml @@ -0,0 +1,8 @@ +module: satosa.micro_services.attribute_generation.AddSyntheticAttributes +name: AddSyntheticAttributes +config: + synthetic_attributes: + default: + default: + schachomeorganization: >- + {{ uid.scope }} diff --git a/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml new file mode 100644 index 000000000..e820311e7 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml @@ -0,0 +1,10 @@ +module: satosa.micro_services.attribute_processor.AttributeProcessor +name: RegexAttributeProcessor +config: + process: + - attribute: uid + processors: + - name: RegexSubProcessor + module: satosa.micro_services.processors.regex_sub_processor + regex_sub_match_pattern: ^(.+)@.+$ + regex_sub_replace_pattern: \1 diff --git a/roles/sram-midproxy/files/proxy_conf.yaml b/roles/sram-midproxy/files/proxy_conf.yaml new file mode 100644 index 000000000..136268e61 --- /dev/null +++ b/roles/sram-midproxy/files/proxy_conf.yaml @@ -0,0 +1,74 @@ +# BASE: https://example.com +BASE: !ENV SATOSA_BASE + +COOKIE_STATE_NAME: "SATOSA_STATE" +CONTEXT_STATE_DELETE: yes +#STATE_ENCRYPTION_KEY: "asdASD123" + +cookies_samesite_compat: + - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"] + +INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + +BACKEND_MODULES: + - "plugins/backends/openid_backend.yaml" + +FRONTEND_MODULES: + - 
"plugins/frontends/saml2_frontend.yaml" + - "plugins/frontends/ping_frontend.yaml" + +MICRO_SERVICES: + - "plugins/microservices/generate_attributes.yaml" + - "plugins/microservices/regex_attributes.yaml" + +LOGGING: + version: 1 + formatters: + simple: + format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" + handlers: + stdout: + class: logging.StreamHandler + stream: "ext://sys.stdout" + level: INFO + formatter: simple + syslog: + class: logging.handlers.SysLogHandler + address: "/dev/log" + level: INFO + formatter: simple + debug_file: + class: logging.FileHandler + filename: satosa-debug.log + encoding: utf8 + level: INFO + formatter: simple + error_file: + class: logging.FileHandler + filename: satosa-error.log + encoding: utf8 + level: ERROR + formatter: simple + info_file: + class: logging.handlers.RotatingFileHandler + filename: satosa-info.log + encoding: utf8 + maxBytes: 10485760 # 10MB + backupCount: 20 + level: INFO + formatter: simple + loggers: + satosa: + level: INFO + saml2: + level: INFO + oidcendpoint: + level: INFO + pyop: + level: INFO + oic: + level: INFO + root: + level: INFO + handlers: + - stdout diff --git a/roles/sram-midproxy/tasks/main.yml b/roles/sram-midproxy/tasks/main.yml new file mode 100644 index 000000000..2962a6f08 --- /dev/null +++ b/roles/sram-midproxy/tasks/main.yml @@ -0,0 +1,59 @@ +--- +- name: Create directory to keep configfile + ansible.builtin.file: + dest: "/opt/sram/midproxy" + state: directory + owner: 1000 + group: 1000 + mode: "0770" + +- name: Copy EB SP metadata + ansible.builtin.copy: + src: "{{ inventory_dir }}/files/midproxy/{{ midproxy.sp_metadata }}" + dest: "/opt/sram/midproxy/{{ midproxy.sp_metadata }}" + owner: 1000 + group: 1000 + mode: "0740" + +- name: Copy SATOSA conf files + ansible.builtin.copy: + src: "{{ item }}" + dest: "/opt/sram/midproxy/{{ item }}" + owner: 1000 + group: 1000 + with_items: + - internal_attributes.yaml + - proxy_conf.yaml + - plugins/ + +- name: Create the SATOSA container + community.docker.docker_container: + name: midproxy + image: satosa:{{ midproxy.satosa_version }} + pull: true + restart_policy: "always" + state: started + restart: true + networks: + - name: "loadbalancer" + env: + SATOSA_BASE: 'https://midproxy.{{ openconextaccess_base_domain }}' + SATOSA_STATE_ENCRYPTION_KEY: '{{ midproxy_state_encryption_key }}' + SATOSA_SP_METADATA: '{{ midproxy.sp_metadata }}' + SATOSA_ISSUER: '{{ midproxy.issuer }}' + SATOSA_CLIENT_ID: '{{ midproxy_client_id }}' + SATOSA_CLIENT_SECRET: '{{ midproxy_client_secret }}' + volumes: + - /opt/sram/midproxy:/etc/satosa + labels: + traefik.http.routers.midproxy.rule: "Host(`midproxy.{{ openconextaccess_base_domain }}`)" + traefik.http.routers.midproxy.tls: "true" + traefik.enable: "true" + # curl is not availavble in the minimized satosa image + # so this healthcheck won't work + # healthcheck: + # test: ["CMD", "curl", "--fail" , "http://localhost" ] + # interval: 10s + # timeout: 10s + # retries: 3 + # start_period: 10s diff --git a/roles/sram-plsc/defaults/main.yml b/roles/sram-plsc/defaults/main.yml new file mode 100644 index 000000000..2a3711b0c --- /dev/null +++ b/roles/sram-plsc/defaults/main.yml @@ -0,0 +1,15 @@ +--- +plsc: "{{ plsc_defaults | combine(plsc_overrides, recursive=true) }}" + +plsc_defaults: + image: "ghcr.io/surfscz/sram-plsc:main" + conf_dir: "{{current_release_appdir}}/sram/plsc" + ansible_nolog: false + ldap_uri: "ldap://ldap:389/" + ldap_basedn: "dc=services,dc=vnet" + ldap_binddn: "cn=admin,dc=vnet" + ldap_password: 
"secret" + sbs_host: "http://sbs-server:8080" + sbs_user: "sysread" + sbs_password: "secret" + retry: 3 diff --git a/roles/sram-plsc/handlers/main.yml b/roles/sram-plsc/handlers/main.yml new file mode 100644 index 000000000..70cbb3672 --- /dev/null +++ b/roles/sram-plsc/handlers/main.yml @@ -0,0 +1,18 @@ +--- +# - name: enable plsc job +# systemd: +# name: "plsc.timer" +# enabled: true +# state: "restarted" +# daemon_reload: true + +# - name: "restart zabbix-agent" +# systemd: +# name: "zabbix-agent2.service" +# state: "restarted" + +- name: Restart the plsc container + community.docker.docker_container: + name: sram-plsc + restart: true + state: started diff --git a/roles/sram-plsc/tasks/main.yml b/roles/sram-plsc/tasks/main.yml new file mode 100644 index 000000000..0d3900bd9 --- /dev/null +++ b/roles/sram-plsc/tasks/main.yml @@ -0,0 +1,28 @@ +--- +- name: Make sure clients sync directory exists + file: + path: "{{ plsc.conf_dir }}" + state: directory + mode: "0755" + +- name: "Create plsc.yml source if it doesn't exist" + template: + src: "plsc.yml.j2" + dest: "{{ plsc.conf_dir }}/plsc.yml" + mode: "0640" + no_log: "{{plsc.ansible_nolog}}" + notify: "Restart the plsc container" + +- name: Create the plsc container + community.docker.docker_container: + name: "sram-plsc" + image: "{{ plsc.image }}" + restart_policy: "always" + state: started + pull: true + mounts: + - type: bind + source: "{{ plsc.conf_dir }}/plsc.yml" + target: "/opt/plsc/plsc.yml" + networks: + - name: "loadbalancer" diff --git a/roles/sram-plsc/templates/plsc.yml.j2 b/roles/sram-plsc/templates/plsc.yml.j2 new file mode 100644 index 000000000..a42c00807 --- /dev/null +++ b/roles/sram-plsc/templates/plsc.yml.j2 @@ -0,0 +1,25 @@ +--- +ldap: + src: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 + dst: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 +sbs: + src: + host: "{{ plsc.sbs_host }}" + user: "{{ plsc.sbs_user }}" + passwd: "{{ plsc.sbs_password }}" + verify_ssl: {{ false if env=='vm' else true }} + timeout: 60 + retry: {{ plsc.retry }} +pwd: "{CRYPT}!" 
+uid: 1000
+gid: 1000
diff --git a/roles/sram-plsc/vars/main.yml b/roles/sram-plsc/vars/main.yml
new file mode 100644
index 000000000..761942f7b
--- /dev/null
+++ b/roles/sram-plsc/vars/main.yml
@@ -0,0 +1 @@
+current_release_appdir: /opt/openconext
diff --git a/roles/sram-redis/defaults/main.yml b/roles/sram-redis/defaults/main.yml
new file mode 100644
index 000000000..31d44935e
--- /dev/null
+++ b/roles/sram-redis/defaults/main.yml
@@ -0,0 +1,11 @@
+---
+redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}"
+redis_defaults:
+  image: "docker.io/library/redis:7"
+  conf_dir: "{{ current_release_appdir }}/sram/redis"
+  data_dir: "{{ current_release_appdir }}/sram/redis/data"
+  user: redis
+  group: redis
+  redis_user: default
+  redis_password: changethispassword
+  max_memory: 100mb
diff --git a/roles/sram-redis/handlers/main.yml b/roles/sram-redis/handlers/main.yml
new file mode 100644
index 000000000..b08f0b62b
--- /dev/null
+++ b/roles/sram-redis/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+- name: Restart redis container
+  community.docker.docker_container:
+    name: sram-redis
+    state: started
+    restart: true
diff --git a/roles/sram-redis/tasks/main.yml b/roles/sram-redis/tasks/main.yml
new file mode 100644
index 000000000..72789b08f
--- /dev/null
+++ b/roles/sram-redis/tasks/main.yml
@@ -0,0 +1,61 @@
+---
+- name: "Create redis group"
+  group:
+    name: "{{ redis.group }}"
+    state: "present"
+  register: "result"
+
+- name: "Save redis group gid"
+  set_fact:
+    redis_group_gid: "{{ result.gid }}"
+
+- name: "Create redis user"
+  user:
+    name: "{{ redis.user }}"
+    group: "{{ redis.group }}"
+    comment: "User to run SRAM Redis service"
+    shell: "/bin/false"
+    password: "!"
+    home: "{{ redis.conf_dir }}"
+    create_home: false
+    state: "present"
+  register: "result"
+
+- name: "Save redis user uid"
+  set_fact:
+    redis_user_uid: "{{ result.uid }}"
+
+- name: "Create directories"
+  file:
+    path: "{{item.path}}"
+    state: "directory"
+    owner: "{{ redis.user }}"
+    group: "{{ redis.group }}"
+    mode: "{{item.mode}}"
+  with_items:
+    - { path: "{{redis.conf_dir}}", mode: "0755" }
+    - { path: "{{redis.data_dir}}", mode: "0755" }
+
+- name: "Create redis config"
+  template:
+    src: "redis.conf.j2"
+    dest: "{{ redis.conf_dir }}/redis.conf"
+    owner: "{{ redis.user }}"
+    group: "{{ redis.group }}"
+    mode: "0644"
+  notify: "Restart redis container"
+
+- name: "Create redis container"
+  community.docker.docker_container:
+    name: "sram-redis"
+    image: "{{ redis.image }}"
+    restart_policy: "always"
+    state: "started"
+    user: "{{ redis_user_uid }}:{{ redis_group_gid }}"
+    command: |
+      redis-server /usr/local/etc/redis/redis.conf
+    volumes:
+      - "{{ redis.conf_dir }}:/usr/local/etc/redis"
+      - "{{ redis.data_dir }}:/data"
+    networks:
+      - name: loadbalancer
diff --git a/roles/sram-redis/templates/redis.conf.j2 b/roles/sram-redis/templates/redis.conf.j2
new file mode 100644
index 000000000..ba231dc58
--- /dev/null
+++ b/roles/sram-redis/templates/redis.conf.j2
@@ -0,0 +1,3 @@
+user {{redis.redis_user}} on @all ~* &* >{{redis.redis_password}}
+maxmemory {{ redis.max_memory }}
+maxmemory-policy allkeys-lru
diff --git a/roles/sram-redis/vars/main.yml b/roles/sram-redis/vars/main.yml
new file mode 100644
index 000000000..761942f7b
--- /dev/null
+++ b/roles/sram-redis/vars/main.yml
@@ -0,0 +1 @@
+current_release_appdir: /opt/openconext
diff --git a/roles/sram-sbs/defaults/main.yml b/roles/sram-sbs/defaults/main.yml
new file mode 100644
index 000000000..8cbe8d109
--- /dev/null
+++ b/roles/sram-sbs/defaults/main.yml
@@ -0,0 +1,167 @@
+---
+sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}"
+
+sbs_defaults:
+  base_domain: "test2.sram.surf.nl"
+  ansible_nolog: true
+  base_url: "https://{{ sbs_defaults.base_domain }}"
+  server_image: "ghcr.io/surfscz/sram-sbs-server:main"
+  client_image: "ghcr.io/surfscz/sram-sbs-client:main"
+
+  openidc_timeout: 86400
+  sram_conf_dir: "{{ current_release_appdir }}/sram"
+
+  work_dir: "{{ sbs_defaults.sram_conf_dir }}/sbs"
+  git_dir: "{{ sbs_defaults.work_dir }}/sbs"
+  env_dir: "{{ sbs_defaults.work_dir }}/sbs-env"
+  conf_dir: "{{ sbs_defaults.work_dir }}/config"
+  log_dir: "{{ sbs_defaults.work_dir }}/log"
+  cert_dir: "{{ sbs_defaults.work_dir }}/cert"
+  apache_conf: "{{ sbs_defaults.work_dir }}/sbs_defaults.conf"
+  nginx_conf: "{{ sbs_defaults.work_dir }}/nginx.conf"
+
+  db_name: "sbs"
+  db_user: "sbsrw"
+  # dbbackup_user: "sbs_backupper"
+  migration_user: "sbsmigrate"
+
+  db_connection: "\
+    mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_defaults.db_name }}\
+    ?ssl=true&charset=utf8mb4"
+  db_connection_sbs: "{{ sbs_defaults.db_connection | format(sbs_defaults.db_user, mysql_passwords.sbs) }}"
+  db_connection_migration: "\
+    {{ sbs_defaults.db_connection | format(sbs_defaults.migration_user, mysql_passwords.sbsmigrate) }}"
+
+  db_secret: secret
+  encryption_key: encryption_key
+
+  redis_host: sram-redis
+  redis_port: 6379
+  redis_ssl: false
+  redis_user: default
+
+  mail_host: "host.docker.internal"
+  mail_port: 25
+
+  user: "sbs"
+  group: "sbs"
+
+  session_lifetime: 1440
+  secret_key_suffix: ""
+
+  oidc_crypto_password: "CHANGEME"
+  uid_attribute: "sub"
+
+  disclaimer_color: "#a29c13"
+  disclaimer_label: wsgi
+
+  urn_namespace: "urn:example:sbs"
+  eppn_scope: "sbs.example.edu"
+  restricted_co_default_org: "example.org"
+
+  mail_sender_name: "SURF"
+  mail_sender_email: "no-reply@localhost"
+  exceptions_mail: "root@localhost"
+
+  support_email: "sram-support@localhost"
+  admin_email: "sram-beheer@localhost"
+  ticket_email: "sram-support@surf.nl"
+  eduteams_email: "eduteams@localhost"
+
+  suppress_mails: False
+
+  wiki_link: "https://www.example.org/wiki"
+
+  cron_hour_of_day: 4
+  seed_allowed: True
+  api_keys_enabled: True
+  feedback_enabled: True
+  audit_trail_notifications_enabled: True
+  send_exceptions: False
+  send_js_exceptions: False
+  second_factor_authentication_required: True
+  totp_token_name: "SRAM-example"
+  notifications_enabled: True
+  invitation_reminders_enabled: True
+  invitation_expirations_enabled: True
+  open_requests_enabled: True
+  scim_sweep: False
+  impersonation_allowed: True
+  admin_platform_backdoor_totp: True
+  past_dates_allowed: True
+  mock_scim_enabled: True
+  log_to_stdout: True
+
+  delete_orphaned: True
+  suspension_inactive_days: 365
+  suspension_reminder_days: 14
+  suspension_notify_admin: False
+
+  oidc_config_url: "http://localhost/.well-known/openid-configuration"
+  oidc_authz_endpoint: "http://localhost/OIDC/authorization"
+  oidc_token_endpoint: "http://localhost/OIDC/token"
+  oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo"
+  oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json"
+  oidc_redirect_uri: "https://{{sbs_defaults.base_domain}}/api/users/resume-session"
+  mfa_idp_allowed: false
+  eduteams_continue_endpoint: "https://localhost/continue"
+  eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)"
+  oidc_jwt_audience: "https://localhost"
+  continue_eduteams_redirect_uri: "https://localhost/continue"
+  oidc_verify_peer: False
+  oidc_scopes:
+    - openid
+
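+  # For illustration: with the defaults above, db_connection_sbs expands to
+  #   mysql+mysqldb://sbsrw:<mysql_passwords.sbs>@<mariadb_host>/sbs?ssl=true&charset=utf8mb4
+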
manage_base_enabled: False + manage_base_url: "https://manage.{{base_domain}}" + manage_sram_rp_entity_id: "sbs.{{sbs_defaults.base_domain}}" + manage_verify_peer: False + + idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " + # backup_dir: "{{backup_base}}/sbs" + + swagger_enabled: true + + ssid_identity_providers: [] + surf_secure_id: + environment: "unknown.example.org" + sp_entity_id: "https://sbs.{{sbs_defaults.base_domain}}" + acs_url: "https://{{sbs_defaults.base_domain}}/api/users/acs" + sa_gw_environment: "sa-gw.unknown.example.org" + sa_idp_certificate: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + priv: | + -----BEGIN RSA PRIVATE KEY----- + abcde + -----END RSA PRIVATE KEY----- + pub: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + + ssid_authncontext: "\ + http://{{ sbs_defaults.surf_secure_id.environment }}/assurance/sfo-level2" + ssid_entityid: "\ + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" + ssid_sso_endpoint: "\ + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" + + mfa_sso_minutes: 10 + mfa_fallback_enabled: true + + ldap_url: "ldap://ldap.example.com/dc=example,dc=com" + ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" + + csp_style_hashes: + - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' + - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' + - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' + - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw=' + - 'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ=' + - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw=' + - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request + - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request + + engine_block_api_token: secret diff --git a/roles/sram-sbs/files/yarn.gpg b/roles/sram-sbs/files/yarn.gpg new file mode 100644 index 000000000..3e9e7d155 --- /dev/null +++ b/roles/sram-sbs/files/yarn.gpg @@ -0,0 +1,243 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1 + +mQINBFf0j5oBEADS6cItqCbf4lOLICohq2aHqM5I1jsz3DC4ddIU5ONbKXP1t0wk +FEUPRzd6m80cTo7Q02Bw7enh4J6HvM5XVBSSGKENP6XAsiOZnY9nkXlcQAPFRnCn +CjEfoOPZ0cBKjn2IpIXXcC+7xh4p1yruBpOsCbT6BuzA+Nm9j4cpRjdRdWSSmdID +TyMZClmYm/NIfCPduYvNZxZXhW3QYeieP7HIonhZSHVu/jauEUyHLVsieUIvAOJI +cXYpwLlrw0yy4flHe1ORJzuA7EZ4eOWCuKf1PgowEnVSS7Qp7lksCuljtfXgWelB +XGJlAMD90mMbsNpQPF8ywQ2wjECM8Q6BGUcQuGMDBtFihobb+ufJxpUOm4uDt0y4 +zaw+MVSi+a56+zvY0VmMGVyJstldPAcUlFYBDsfC9+zpzyrAqRY+qFWOT2tj29R5 +ZNYvUUjEmA/kXPNIwmEr4oj7PVjSTUSpwoKamFFE6Bbha1bzIHpdPIRYc6cEulp3 +dTOWfp+Cniiblp9gwz3HeXOWu7npTTvJBnnyRSVtQgRnZrrtRt3oLZgmj2fpZFCE +g8VcnQOb0iFcIM7VlWL0QR4SOz36/GFyezZkGsMlJwIGjXkqGhcEHYVDpg0nMoq1 +qUvizxv4nKLanZ5jKrV2J8V09PbL+BERIi6QSeXhXQIui/HfV5wHXC6DywARAQAB +tBxZYXJuIFBhY2thZ2luZyA8eWFybkBkYW4uY3g+iQI5BBMBCAAjBQJX9I+aAhsD +BwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQFkawG4blAxB52Q/9FcyGIEK2 +QamDhookuoUGGYjIeN+huQPWmc6mLPEKS2Vahk5jnJKVtAFiaqINiUtt/1jZuhF2 +bVGITvZK79kM6lg42xQcnhypzQPgkN7GQ/ApYqeKqCh1wV43KzT/CsJ9TrI0SC34 +qYHTEXXUprAuwQitgAJNi5QMdMtauCmpK+Xtl/72aetvL8jMFElOobeGwKgfLo9+ +We2EkKhSwyiy3W5TYI1UlV+evyyT+N0pmhRUSH6sJpzDnVYYPbCWa2b+0D/PHjXi +edKcely/NvqyVGoWZ+j41wkp5Q0wK2ybURS1ajfaKt0OcMhRf9XCfeXAQvU98mEk +FlfPaq0CXsjOy8eJXDeoc1dwxjDi2YbfHel0CafjrNp6qIFG9v3JxPUU19hG9lxD +Iv7VXftvMpjJCo/J4Qk+MOv7KsabgXg1iZHmllyyH3TY4AA4VA+mlceiiOHdXbKk 
+Q3BfS1jdXPV+2kBfqM4oWANArlrFTqtop8PPsDNqh/6SrVsthr7WTvC5q5h/Lmxy +Krm4Laf7JJMvdisfAsBbGZcR0Xv/Vw9cf2OIEzeOWbj5xul0kHT1vHhVNrBNanfe +t79RTDGESPbqz+bTS7olHWctl6TlwxA0/qKlI/PzXfOg63Nqy15woq9buca+uTcS +ccYO5au+g4Z70IEeQHsq5SC56qDR5/FvYyu5Ag0EV/SPmgEQANDSEMBKp6ER86y+ +udfKdSLP9gOv6hPsAgCHhcvBsks+ixeX9U9KkK7vj/1q6wodKf9oEbbdykHgIIB1 +lzY1l7u7/biAtQhTjdEZPh/dt3vjogrJblUEC0rt+fZe325ociocS4Bt9I75Ttkd +nWgkE4uOBJsSllpUbqfLBfYR58zz2Rz1pkBqRTkmJFetVNYErYi2tWbeJ59GjUN7 +w1K3GhxqbMbgx4dF5+rjGs+KI9k6jkGeeQHqhDk+FU70oLVLuH2Dmi9IFjklKmGa +3BU7VpNxvDwdoV7ttRYEBcBnPOmL24Sn4Xhe2MDCqgJwwyohd9rk8neV7GtavVea +Tv6bnzi1iJRgDld51HFWG8X+y55i5cYWaiXHdHOAG1+t35QUrczm9+sgkiKSk1II +TlEFsfwRl16NTCMGzjP5kGCm/W+yyyvBMw7CkENQcd23fMsdaQ/2UNYJau2PoRH/ +m+IoRehIcmE0npKeLVTDeZNCzpmfY18T542ibK49kdjZiK6G/VyBhIbWEFVu5Ll9 ++8GbcO9ucYaaeWkFS8Hg0FZafMk59VxKiICKLZ5he/C4f0UssXdyRYU6C5BH8UTC +QLg0z8mSSL+Wb2iFVPrn39Do7Zm8ry6LBCmfCf3pI99Q/1VaLDauorooJV3rQ5kC +JEiAeqQtLOvyoXIex1VbzlRUXmElABEBAAGJAh8EGAEIAAkFAlf0j5oCGwwACgkQ +FkawG4blAxAUUQ//afD0KLHjClHsA/dFiW+5qVzI8kPMHwO1QcUjeXrB6I3SluOT +rLSPhOsoS72yAaU9hFuq8g9ecmFrl3Skp/U4DHZXioEmozyZRp7eVsaHTewlfaOb +6g7+v52ktYdomcp3BM5v/pPZCnB5rLrH2KaUWbpY6V6tqtCHbF7zftDqcBENJDXf +hiCqS19J08GZFjDEqGDrEj3YEmEXZMN7PcXEISPIz6NYI6rw4yVH8AXfQW6vpPzm +ycHwI0QsVW2NQdcZ6zZt+phm6shNUbN2iDdg3BJICmIvQf8qhO3bOh0Bwc11FLHu +MKuGVxnWN82HyIsuUB7WDLBHEOtg61Zf1nAF1PQK52YuQz3EWI4LL9OqVqfSTY1J +jqIfj+u1PY2UHrxZfxlz1M8pXb1grozjKQ5aNqBKRrcMZNx71itR5rv18qGjGR2i +Sciu/xah7zAroEQrx72IjYt03tbk/007CvUlUqFIFB8kY1bbfX8JAA+TxelUniUR +2CY8eom5HnaPpKE3kGXZ0jWkudbWb7uuWcW1FE/bO+VtexpBL3SoXmwbVMGnJIEi +Uvy8m6ez0kzLXzJ/4K4b8bDO4NjFX2ocKdzLA89Z95KcZUxEG0O7kaDCu0x3BEge +uArJLecD5je2/2HXAdvkOAOUi6Gc/LiJrtInc0vUFsdqWCUK5Ao/MKvdMFW5Ag0E +V/SP2AEQALRcYv/hiv1n3VYuJbFnEfMkGwkdBYLGo3hiHKY8xrsFVePl9SkL8aqd +C310KUFNI42gGY/lz54RUHOqfMszTdafFrmwU18ECWGo4oG9qEutIKG7fkxcvk2M +tgsOMZFJqVDS1a9I4QTIkv1ellLBhVub9S7vhe/0jDjXs9IyOBpYQrpCXAm6SypC +fpqkDJ4qt/yFheATcm3s8ZVTsk2hiz2jnbqfvpte3hr3XArDjZXr3mGAp3YY9JFT +zVBOhyhT/92e6tURz8a/+IrMJzhSyIDel9L+2sHHo9E+fA3/h3lg2mo6EZmRTuvE +v9GXf5xeP5lSCDwS6YBXevJ8OSPlocC8Qm8ziww6dy/23XTxPg4YTkdf42i7VOpS +pa7EvBGne8YrmUzfbrxyAArK05lo56ZWb9ROgTnqM62wfvrCbEqSHidN3WQQEhMH +N7vtXeDPhAd8vaDhYBk4A/yWXIwgIbMczYf7Pl7oY3bXlQHb0KW/y7N3OZCr5mPW +94VLLH/v+T5R4DXaqTWeWtDGXLih7uXrG9vdlyrULEW+FDSpexKFUQe83a+Vkp6x +GX7FdMC9tNKYnPeRYqPF9UQEJg+MSbfkHSAJgky+bbacz+eqacLXMNCEk2LXFV1B +66u2EvSkGZiH7+6BNOar84I3qJrU7LBD7TmKBDHtnRr9JXrAxee3ABEBAAGJBEQE +GAEIAA8FAlf0j9gCGwIFCQHhM4ACKQkQFkawG4blAxDBXSAEGQEIAAYFAlf0j9gA +CgkQ0QH3iZ1B88PaoA//VuGdF5sjxRIOAOYqXypOD9/Kd7lYyxmtCwnvKdM7f8O5 +iD8oR2Pk1RhYHjpkfMRVjMkaLfxIRXfGQsWfKN2Zsa4zmTuNy7H6X26XW3rkFWpm +dECz1siGRvcpL6NvwLPIPQe7tST72q03u1H7bcyLGk0sTppgMoBND7yuaBTBZkAO +WizR+13x7FV+Y2j430Ft/DOe/NTc9dAlp6WmF5baOZClULfFzCTf9OcS2+bo68oP +gwWwnciJHSSLm6WRjsgoDxo5f3xBJs0ELKCr4jMwpSOTYqbDgEYOQTmHKkX8ZeQA +7mokc9guA0WK+DiGZis85lU95mneyJ2RuYcz6/VDwvT84ooe1swVkC2palDqBMwg +jZSTzbcUVqZRRnSDCe9jtpvF48WK4ZRiqtGO6Avzg1ZwMmWSr0zHQrLrUMTq/62W +KxLyj2oPxgptRg589hIwXVxJRWQjFijvK/xSjRMLgg73aNTq6Ojh98iyKAQ3HfzW +6iXBLLuGfvxflFednUSdWorr38MspcFvjFBOly+NDSjPHamNQ2h19iHLrYT7t4ve +nU9PvC+ORvXGxTN8mQR9btSdienQ8bBuU/mg/c417w6WbY7tkkqHqUuQC9LoaVdC +QFeE/SKGNe+wWN/EKi0QhXR9+UgWA41Gddi83Bk5deuTwbUeYkMDeUlOq3yyemcG +VxAA0PSktXnJgUj63+cdXu7ustVqzMjVJySCKSBtwJOge5aayonCNxz7KwoPO34m +Gdr9P4iJfc9kjawNV79aQ5aUH9uU2qFlbZOdO8pHOTjy4E+J0wbJb3VtzCJc1Eaa +83kZLFtJ45Fv2WQQ2Nv3Fo+yqAtkOkaBZv9Yq0UTaDkSYE9MMzHDVFx11TT21NZD +xu2QiIiqBcZfqJtIFHN5jONjwPG08xLAQKfUNROzclZ1h4XYUT+TWouopmpNeay5 
+JSNcp5LsC2Rn0jSFuZGPJ1rBwB9vSFVA/GvOj8qEdfhjN3XbqPLVdOeChKuhlK0/ +sOLZZG91SHmT5SjP2zM6QKKSwNgHX4xZt4uugSZiY13+XqnrOGO9zRH8uumhsQmI +eFEdT27fsXTDTkWPI2zlHTltQjH1iebqqM9gfa2KUt671WyoL1yLhWrgePvDE+He +r002OslvvW6aAIIBki3FntPDqdIH89EEB4UEGqiA1eIZ6hGaQfinC7/IOkkm/mEa +qdeoI6NRS521/yf7i34NNj3IaL+rZQFbVWdbTEzAPtAs+bMJOHQXSGZeUUFrEQ/J +ael6aNg7mlr7cacmDwZWYLoCfY4w9GW6JHi6i63np8EA34CXecfor7cAX4XfaokB +XjyEkrnfV6OWYS7f01JJOcqYANhndxz1Ph8bxoRPelf5q+W5Ag0EWBU7dwEQAL1p +wH4prFMFMNV7MJPAwEug0Mxf3OsTBtCBnBYNvgFB+SFwKQLyDXUujuGQudjqQPCz +/09MOJPwGCOi0uA0BQScJ5JAfOq33qXi1iXCj9akeCfZXCOWtG3Izc3ofS6uee7K +fWUF1hNyA3PUwpRtM2pll+sQEO3y/EN7xYGUOM0mlCawrYGtxSNMlWBlMk/y5HK9 +upz+iHwUaEJ4PjV+P4YmDq0PnPvXE4qhTIvxx0kO5oZF0tAJCoTg1HE7o99/xq9Z +rejDR1JJj6btNw1YFQsRDLxRZv4rL9He10lmLhiQE8QN7zOWzyJbRP++tWY2d2zE +yFzvsOsGPbBqLDNkbb9d8Bfvp+udG13sHAEtRzI2UWe5SEdVHobAgu5l+m10WlsN +TG/L0gJe1eD1bwceWlnSrbqw+y+pam9YKWqdu18ETN6CeAbNo4w7honRkcRdZyoG +p9zZf3o1bGBBMla6RbLuJBoRDOy2Ql7B+Z87N0td6KlHI6X8fNbatbtsXR7qLUBP +5oRb6nXX4+DnTMDbvFpE2zxnkg+C354Tw5ysyHhM6abB2+zCXcZ3holeyxC+BUrO +gGPyLH/s01mg2zmttwC1UbkaGkQ6SwCoQoFEVq9Dp96B6PgZxhEw0GMrKRw53LoX +4rZif9Exv6qUFsGY8U9daEdDPF5UHYe7t/nPpfW3ABEBAAGJBD4EGAEIAAkFAlgV +O3cCGwICKQkQFkawG4blAxDBXSAEGQEIAAYFAlgVO3cACgkQRsITDf0kl/VynQ/+ +P3Vksu4fno26vA7ml9bzV3mu/X/gzU1HqySqYv9Zwzk2o512Z4QkoT/8lRepIG7v +AFRQzPn56Pz/vpMfiMDaf6thxs8wpv4y3m+rcQIQKO4sN3wwFPPbvM8wGoY6fGav +IkLKKIXy1BpzRGltGduf0c29+ycvzccQpyuTrZk4Zl73kLyBS8fCt+MZWejMMolD +uuLJiHbXci6+Pdi3ImabyStbNnJYmSyruNHcLHlgIbyugTiAcdTy0Bi/z8MfeYwj +VAwEkX4b2NwtuweYLzupBOTv0SqYCmBduZObkS5LHMZ+5Yh9Hfrd04uMdO5cIiy0 +AsGehTRC3Xyaea7Qk993rNcGEzX7LNB1GB2BXSq9FYPb+q0ewf8k8Lr9E0WG0dvD +OaJSkSGedgdA1QzvTgpAAkVWsXlksShVf4NVskxNUGDRaPLeRB+IV/5jO+kRsFuO +g5Tlkn6cgu1+Bn5gIfv0ny9K7TeC697gRQIcK8db1t8XidgSKbRmsSYEaRCy3c9x +w2/N7DLU/Js3gV8FUd7cZpaYN+k/erMdyfqLA7oFd+HLbA5Du/971yF8/6Bof8zp +jB9+QPRIARpcROEcQXz09dtl8wW8M0r09xpna+0Jk6JxF+stD97+hzikQXIxUtCX +j35ps9USSxv1cuz0MaFdWGW13OugtN4bQ2DNgelbTDUEKg//YTbBl9oGYQxHv9S5 +qvZVNvV3DuI18E5VW5ddyo/JfW24+Tukli/ZjPQYnMOP86nnIqo/LPGb4nV1uWL4 +KhmOCbH7t43+TkAwdwoxLjYP7iOqQp9VRPFjomUfvtmLjHp4r3cVEt5QeJEZLiSC +zSKMjPKqRMo5nNs3Et+/FyWCMRYdSggwhBfkbKKo44H9pmL3bTLqyir7EJAcArla +zjKMyZqRsK3gZfQgoASN5xAhemVWHnnecVSAqrOW599EBkc7Kf6lXjTVHtHN02vX +YYRZ16zrEjrfwb23LR+lAxSfWxLDovKLBg2SPbpduEv1GxyEFgF7v9fco4aQbuh/ +fOGvA8nuXkC5nI6ukw4c4zwmJ5+SNQthFUYKWLd4hR4qrCoJkMEWZmsCRtqxjVCJ +/i9ygRJHOGAWaam7bS+U7pdmq2mgF+qTxb2vX6mSzI3q3M7drGUA3EdaZo1hPA5u +kWi7tMCGqPQmtUFRnUvHPzCDuXLYT8lRxhTxDi3T5MXdIUlAUTcNpwG8Ill0xkGc +pMlh0D5p44GEdMFfJiXw6AUETHcqC2qZr2rP9kpzvVlapIrsPRg/DU+s70YnccI3 +iMCVm4/WrghFeK232zkjiwRVOm+IEWBlDFrm4MMjfguUeneYbK9WhqJnss9nc4QK +Vhzuyn3GTtg1w/T6CaYVXBjcHFmJBEQEGAEIAA8CGwIFAlokZSMFCQQWmKMCKcFd +IAQZAQgABgUCWBU7dwAKCRBGwhMN/SSX9XKdD/4/dWSy7h+ejbq8DuaX1vNXea79 +f+DNTUerJKpi/1nDOTajnXZnhCShP/yVF6kgbu8AVFDM+fno/P++kx+IwNp/q2HG +zzCm/jLeb6txAhAo7iw3fDAU89u8zzAahjp8Zq8iQsoohfLUGnNEaW0Z25/Rzb37 +Jy/NxxCnK5OtmThmXveQvIFLx8K34xlZ6MwyiUO64smIdtdyLr492LciZpvJK1s2 +cliZLKu40dwseWAhvK6BOIBx1PLQGL/Pwx95jCNUDASRfhvY3C27B5gvO6kE5O/R +KpgKYF25k5uRLkscxn7liH0d+t3Ti4x07lwiLLQCwZ6FNELdfJp5rtCT33es1wYT +Nfss0HUYHYFdKr0Vg9v6rR7B/yTwuv0TRYbR28M5olKRIZ52B0DVDO9OCkACRVax +eWSxKFV/g1WyTE1QYNFo8t5EH4hX/mM76RGwW46DlOWSfpyC7X4GfmAh+/SfL0rt +N4Lr3uBFAhwrx1vW3xeJ2BIptGaxJgRpELLdz3HDb83sMtT8mzeBXwVR3txmlpg3 +6T96sx3J+osDugV34ctsDkO7/3vXIXz/oGh/zOmMH35A9EgBGlxE4RxBfPT122Xz +BbwzSvT3Gmdr7QmTonEX6y0P3v6HOKRBcjFS0JePfmmz1RJLG/Vy7PQxoV1YZbXc +66C03htDYM2B6VtMNQkQFkawG4blAxCiVRAAhq/1L5YlsmItiC6MROtPP+lfAWRm 
+MSkoIuAtzkV/orqPetwWzjYLgApOvVXBuf9FdJ5vAx1IXG3mDx6mQQWkr4t9onwC +UuQ7lE29qmvCHB3FpKVJPKiGC6xK38t5dGAJtbUMZBQb1vDuQ7new8dVLzBSH1VZ +7gx9AT+WEptWznb1US1AbejO0uT8jsVc/McK4R3LQmVy9+hbTYZFz1zCImuv9SCN +ZPSdLpDe41QxcMfKiW7XU4rshJULKd4HYG92KjeJU80zgCyppOm85ENiMz91tPT7 ++A4O7XMlOaJEH8t/2SZGBE/dmHjSKcWIpJYrIZKXTrNv7rSQGvweNG5alvCAvnrL +J2cRpU1Rziw7auEU1YiSse+hQ1ZBIzWhPMunIdnkL/BJunBTVE7hPMMG7alOLy5Z +0ikNytVewasZlm/dj5tEsfvF7tisVTZWVjWCvEMTP5fecNMEAwbZdBDyQBAN00y7 +xp4Pwc/kPLuaqESyTTt8jGek/pe7/+6fu0GQmR2gZKGagAxeZEvXWrxSJp/q81XS +QGcO6QYMff7VexY3ncdjSVLro+Z3ZtYt6aVIGAEEA5UE341yCGIeN+nr27CXD4fH +F28aPh+AJzYh+uVjQhHbL8agwcyCMLgU88u1U0tT5Qtjwnw+w+3UNhROvn495REp +eEwD60iVeiuF5FW5Ag0EWbWWowEQALCiEk5Ic40W7/v5hqYNjrRlxTE/1axOhhzt +8eCB7eOeNOMQKwabYxqBceNmol/guzlnFqLtbaA6yZQkzz/K3eNwWQg7CfXO3+p/ +dN0HtktPfdCk+kY/t7StKRjINW6S9xk9KshiukmdiDq8JKS0HgxqphBB3tDjmo6/ +RiaOEFMoUlXKSU+BYYpBpLKg53P8F/8nIsK2aZJyk8XuBd0UXKI+N1gfCfzoDWnY +Hs73LQKcjrTaZQauT81J7+TeWoLI28vkVxyjvTXAyjSBnhxTYfwUNGSoawEXyJ1u +KCwhIpklxcCMI9Hykg7sKNsvmJ4uNcRJ7cSRfb0g5DR9dLhR+eEvFd+o4PblKk16 +AI48N8Zg1dLlJuV2cAtl0oBPk+tnbZukvkS5n1IzTSmiiPIXvK2t506VtfFEw4iZ +rJWf2Q9//TszBM3r1FPATLH7EAeG5P8RV+ri7L7NvzP6ZQClRDUsxeimCSe8v/t0 +OpheCVMlM9TpVcKGMw8ig/WEodoLOP4iqBs4BKR7fuydjDqbU0k/sdJTltp7IIdK +1e49POIQ7pt+SUrsq/HnPW4woLC1WjouBWyr2M7/a0SldPidZ2BUAK7O9oXosidZ +MJT7dBp3eHrspY4bdkSxsd0nshj0ndtqNktxkrSFRkoFpMz0J/M3Q93CjdHuTLpT +HQEWjm/7ABEBAAGJBEQEGAEIAA8FAlm1lqMCGwIFCQJ2LQACKQkQFkawG4blAxDB +XSAEGQEIAAYFAlm1lqMACgkQ4HTRbrb/TeMpDQ//eOIsCWY2gYOGACw42JzMVvuT +DrgRT4hMhgHCGeKzn1wFL1EsbSQV4Z6pYvnNayuEakgIz14wf4UFs5u1ehfBwatm +akSQJn32ANcAvI0INAkLEoqqy81mROjMc9FFrOkdqjcN7yN0BzH9jNYL/gsvmOOw +Ou+dIH3C1Lgei844ZR1BZK1900mohuRwcji0sdROMcrKrGjqd4yb6f7yl0wbdAxA +3IHT3TFGczC7Y41P2OEpaJeVIZZgxkgQsJ14qK/QGpdKvmZAQpjHBipeO/H+qxyO +T5Y+f15VLWGOOVL090+ZdtF7h3m4X2+L7xWsFIgdOprfO60gq3e79YFfgNBYU5BG +tJGFGlJ0sGtnpzx5QCRka0j/1E5lIu00sW3WfGItFd48hW6wHCloyoi7pBR7xqSE +oU/U5o7+nC8wHFrDYyqcyO9Q3mZDw4LvlgnyMOM+qLv/fNgO9USE4T30eSvc0t/5 +p1hCKNvyxHFghdRSJqn70bm6MQY+kd6+B/k62Oy8eCwRt4PR+LQEIPnxN7xGuNpV +O1oMyhhO41osYruMrodzw81icBRKYFlSuDOQ5jlcSajc6TvF22y+VXy7nx1q/CN4 +tzB/ryUASU+vXS8/QNM6qI/QbbgBy7VtHqDbs2KHp4cP0j9KYQzMrKwtRwfHqVrw +FLkCp61EHwSlPsEFiglpMg/8DQ92O4beY0n7eSrilwEdJg89IeepTBm1QYiLM33q +WLR9CABYAIiDG7qxviHozVfX6kUwbkntVpyHAXSbWrM3kD6jPs3u/dimLKVyd29A +VrBSn9FC04EjtDWsj1KB7HrFN4oo9o0JLSnXeJb8FnPf3MitaKltvj/kZhegozIs ++zvpzuri0LvoB4fNA0T4eAmxkGkZBB+mjNCrUHIakyPZVzWGL0QGsfK1Q9jvw0OE +rqHJYX8A1wLre/HkBne+e5ezS6Mc7kFW33Y1arfbHFNAe12juPsOxqK76qNilUbQ +pPtNvWP3FTpbkAdodMLq/gQ+M5yHwPe8SkpZ8wYCfcwEemz/P+4QhQB8tbYbpcPx +J+aQjVjcHpsLdrlSY3JL/gqockR7+97GrCzqXbgvsqiWr16Zyn6mxYWEHn9HXMh3 +b+2IYKFFXHffbIBq/mfibDnZtQBrZpn2uyh6F2ZuOsZh0LTD7RL53KV3fi90nS00 +Gs1kbMkPycL1JLqvYQDpllE2oZ1dKDYkwivGyDQhRNfERL6JkjyiSxfZ2c84r2HP +gnJTi/WBplloQkM+2NfXrBo6kLHSC6aBndRKk2UmUhrUluGcQUyfzYRFH5kVueIY +fDaBPus9gb+sjnViFRpqVjefwlXSJEDHWP3Cl2cuo2mJjeDghj400U6pjSUW3bIC +/PK5Ag0EXCxEEQEQAKVjsdljwPDGO+48879LDa1d7GEu/Jm9HRK6INCQiSiS/0mH +keKa6t4DRgCY2ID9lFiegx2Er+sIgL0chs16XJrFO21ukw+bkBdm2HYUKSsUFmr/ +bms8DkmAM699vRYVUAzO9eXG/g8lVrAzlb3RT7eGHYKd15DT5KxXDQB+T+mWE9qD +5RJwEyPjSU+4WjYF+Rr9gbSuAt5UySUb9jTR5HRNj9wtb4YutfP9jbfqy8esQVG9 +R/hpWKb2laxvn8Qc2Xj93qNIkBt/SILfx9WDJl0wNUmu+zUwpiC2wrLFTgNOpq7g +9wRPtg5mi8MXExWwSF2DlD54yxOOAvdVACJFBXEcstQ3SWg8gxljG8eLMpDjwoIB +ax3DZwiYZjkjJPeydSulh8vKoFBCQkf2PcImXdOk2HqOV1L7FROM6fKydeSLJbx1 +7SNjVdQnq1OsyqSO0catAFNptMHBsN+tiCI29gpGegaoumV9cnND69aYvyPBgvdt +mzPChjSmc6rzW1yXCJDm2qzwm/BcwJNXW5B3EUPxc0qSWste9fUna0G4l/WMuaIz 
+VkuTgXf1/r9HeQbjtxAztxH0d0VgdHAWPDkUYmztcZ4sd0PWkVa18qSrOvyhI96g +CzdvMRLX17m1kPvP5PlPulvqizjDs8BScqeSzGgSbbQVm5Tx4w2uF4/n3FBnABEB +AAGJBEQEGAECAA8FAlwsRBECGwIFCQIKEgACKQkQFkawG4blAxDBXSAEGQECAAYF +AlwsRBEACgkQI+cWZ4i2Ph6B0g//cPis3v2M6XvAbVoM3GIMXnsVj1WAHuwA/ja7 +UfZJ9+kV/PiMLkAbW0fBj0/y0O3Ry12VVQGXhC+Vo4j6C8qwFP4OXa6EsxHXuvWM +IztBaX1Kav613aXBtxp6tTrud0FFUh4sDc1RREb3tMr6y5cvFJgnrdWcX1gsl6OD +cgWBGNc6ZX7H7j48hMR6KmNeZocW7p8W+BgDQJqXYwVNL15qOHzVAh0dWsFLE9gw +BTmDCY03x9arxSNDGCXyxt6E77LbNVIoSRlEbkvi6j33nEbuERICYl6CltXQCyiV +KjheJcLMjbgv5+bLCv2zfeJ/WyOmOGKpHRu+lBV1GvliRxUblVlmjWPhYPBZXGyj +II16Tqr+ilREcZFW+STccbrVct75JWLbxwlEmix+W1HwSRCR+KHx3Cur4ZPMOBlP +sFilOOsNa7ROUB56t7zv21Ef3BeeaCd9c4kzNGN8d1icEqSXoWWPqgST0LZPtZyq +WZVnWrHChVHfrioxhSnw8O3wY1A2GSahiCSvvjvOeEoJyU21ZMw6AVyHCh6v42oY +adBfGgFwNo5OCMhNxNy/CcUrBSDqyLVTM5QlNsT75Ys7kHHnc+Jk+xx4JpiyNCz5 +LzcPhlwpqnJQcjJdY1hDhK75Ormj/NfCMeZ8g1aVPX4xEq8AMyZYhZ5/lmM+13Rd +v8ZW6FK7HQ/+IAKzntxOjw0MzCXkksKdmIOZ2bLeOVI8aSLaUmoT5CLuoia9g7iF +HlYrSY+01riRrAaPtYx0x8onfyVxL9dlW/Fv5+qc1fF5FxdhyIgdqgzm82TnXHu/ +haUxYmUvNrbsmmNl5UTTOf+YQHMccKFdYfZ2rCBtbN2niXG1tuz2+k83pozu4mJ1 +rOOLNAsQoY3yR6OODte1FyOgp7blwDhTIoQb8/UiJ7CMBI3OPrfoXFAnhYoxeRSA +N4UFu9/HIkqfaQgRPCZS1gNerWF6r6yz9AZWUZqjSJssjBqXCtK9bGbTYBZk+pw3 +H9Nd0RJ2WJ9qPqmlmUr1wdqct0ChsJx1xAT86QrssicJ/HFFmF45hlnGkHUBWLaV +Jt8YkLb/DqOIbVbwyCLQtJ80VQLEeupfmu5QNsTpntRYNKf8cr00uc8vSYXYFRxa +5H5oRT1eoFEEjDDvokNnHXfT+Hya44IjYpzaqvAgeDp6sYlOdtWIv/V3s+trxACw +TkRN7zw3lLTbT8PK9szK0fYZ5KHG1/AKH+mbZ6qNc/25PNbAFRtttLGuEIC3HJ12 +IAp2JdjioeD2OnWLu4ZeCT2CKKFsleZPrSyCrn3gyZPmfYvv5h2JbQNO6uweOrZE +NWX5SU43OBoplbuKJZsMP6p6NahuGnIeJLlv509JYAf/HN4ARyvvOpO5Ag0EXDf1 +bwEQAKBByJMoxQ7H6AsQP29qjY8/pfDiNloQDHasUXoOyTfUetam3rY/UWCHFrMD +0jvOHNIqEVJPsSWrxBYf+i4NNECsCSj39JHdVLOkn6pJcRnMzmljS8ojOybYRUTT +KdKlV+jYy6hqAjTvnf/pzZOrNseKyxAo/xETphN2UEBKOZwV5j5YV6VXptt6xn1x +EL1wzahZr6qz/gXn5//mg6aPPUCJt7BPBtC34HGoyHUn4Cx/jSU7zlQLV11VyTyt +/TY69Wgc1k21oS0tm44uw8D+4bIXYewxNq0utt75c75JK5rPKCpIkaSgE3YUPAhM +fpoUxSgo+hrTaocLbQm3/fDfRqYhw9IWrOuWLYEEI5NqS0etq2X+nM2oEXymxUM1 +45dicUv27B1YU5IciRaoA3Bwkl3uyvLhkwBNgJGpBoRsgyWKhlUpdMOSAFPHag0D +HNCKbFTGxZOJ1+BoDsIscK864AodI0YvhMFByWGRwQMszQpK/vg9uUdIMDYTzI0i +nvCrOht4R91z/2VZXHlv4D38UYsVE5P6u7N8T6T4SzERBKSktWhnJmMRJK5FQQwM +zWCnSj9TGMC5+JYeMjRV1pUwpZw8iOlDg0x8LfMQ3XbZ0/bvlPsXOjiYmHAjrLZf +qL0vR5jPyrfVUxF/XHJBBC9SEvvXrEDK+G+V9NmNavUNrhLnABEBAAGJBEQEGAEC +AA8FAlw39W8CGwIFCQH+NIACKQkQFkawG4blAxDBXSAEGQECAAYFAlw39W8ACgkQ +T3dnk2lHW6p0eg/+K2JJu1RbTSLJPFYQhLcxX+5d2unkuNLIy3kArtZuB992E2Fw +00okPGtuPdSyk2ygh4DeYnwmabIWChi7LDp+YnqcI4GfMxNG6RsHs+A/77rLBST3 +BB1sejZppmKCQZDSC2pvYaZBpS80UvftCZ9RFdY+kTC22Btn/5ekiQOfIqhUH9Cy +GWS/YlGciomVIVn1hSPN8l4EpBCDtceRaephvzjQIZT3AxOfSlpwJviYjAOkSX4q +WyIjC5Ke5kfEOldUuBN1JGAm45tKlrz/LD/+VOc2IWpbkOIAVSldUgpRyiIJQAZ8 +0trNxrJI7ncaID8lAa7pBptJiL0KorRjk3c6Y7p830Nwe0J5e5+W1RzN4wlR8+9u +uRyP8Mcwz/Hz2jwMiv38Vk4tAOe4PYNZuDnpjZ28yCpF3UUgvzjarubFAcg2jd8S +auCQFlmOfvT+1qIMSeLmWBOdlzJTUpJRcZqnkEE4WtiMSlxyWVFvUwOmKSGi8CLo +GW1Ksh9thQ9zKhvVUiVoKn4Z79HXr4pX6rnp+mweJ2dEZtlqD7HxjVTlCHn9fzCl +t/Nt0h721fJbS587AC/ZMgg5GV+GKu6Mij0sPAowUJVCIwN9uK/GHICZEAoMSngP +8xzKnhU5FD38vwBvsqbKxTtICrv2NuwnQ0WBBQ58w5mv2RCMr2W6iegSKIDjwxAA +hDpCw0dlUOodY4omJB19Ra9zIZO5IGxT2+oksks3uWkT/l+I7FY0+YNtIZnC01Ge +RJxJtuDwQXigYEKn1UEJ7ymBKrAdCEY0OC344AffLx81aOYWbbW7XaO6rZn8nyZu +0oC95dGlQQdWYJBLcTwANx50iQQGkR5a+XF87yVciFm6x5Cf78pzJ5OBvN3qLJzN +4YBftPMKIgbozGm6/3I6DDT0SMeCOhamshoBf7Ksqd6N+XUjRHZr7UwprWDJlhSC +XFF1e6tjlf22NwZ9UH29VswFkepT99tfBFpobjbzfABO0YnAj72WcR2ZKP7oYHf7 
+EkhI2ssWQ9PRPTwdOSXZDEH0s4cJqO+ZzRoAPE+3hbHlGukAqZiiHRlNpOvPdO6Q +mgVBRsURs5i+4vylfat59HUtzQWbTF1bnZbMlefttb5CHRJNb3PTuxHR562Uzp9/ +/SZfDhAx7SYgwRF+FANWJsvX+I7CbP4qvOzutvIYTsNchbCxrOl+0PxMxWaYZzVb +ZW45mO0LFUNCFqcnr3Sot5e9n0C0vjKBV9XgICHKKgeHaMwOMirb1MKvvMpJ3+NI +BYZJ6d+LyhFXL0xJXccUnEXsmk2h4SBEEZYIhAk9ntRmzOXhXFLAOS8agWlmvYwh +xeeb76cVOYlpLw1utXV9hbuo+oM109vMs73mpF88g4g= +=oMDY +-----END PGP PUBLIC KEY BLOCK----- diff --git a/roles/sram-sbs/handlers/main.yml b/roles/sram-sbs/handlers/main.yml new file mode 100644 index 000000000..bc8be505b --- /dev/null +++ b/roles/sram-sbs/handlers/main.yml @@ -0,0 +1,9 @@ +--- +- name: Restart sbs containers + community.docker.docker_container: + name: "{{ item }}" + state: started + restart: true + loop: + - sram-sbs-client + - sram-sbs-server diff --git a/roles/sram-sbs/tasks/main.yml b/roles/sram-sbs/tasks/main.yml new file mode 100644 index 000000000..6881736ec --- /dev/null +++ b/roles/sram-sbs/tasks/main.yml @@ -0,0 +1,179 @@ +--- +# - name: "Initialize database" +# throttle: 1 +# import_tasks: "database_init.yml" +# when: "is_dev" + +- name: "Create SBS group" + group: + name: "{{ sbs.group }}" + state: "present" + register: "result" + +- name: "Save SBS group gid" + set_fact: + sbs_group_gid: "{{ result.gid }}" + +- name: "Create SBS user" + user: + name: "{{ sbs.user }}" + group: "{{ sbs.group }}" + comment: "User to run SBS service" + shell: "/bin/false" + password: "!" + home: "{{ sbs.conf_dir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save sbs user uid" + set_fact: + sbs_user_uid: "{{ result.uid }}" + +- name: "Create directories" + file: + path: "{{item.path}}" + state: "directory" + owner: "{{sbs_user_uid}}" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { path: "{{sbs.work_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}/saml", mode: "0755" } + - { path: "{{sbs.log_dir}}", mode: "0775" } + - { path: "{{sbs.cert_dir}}", mode: "0755" } + +- name: "Fix file permissions" + file: + path: "{{sbs.log_dir}}/{{item}}" + owner: "{{sbs_user_uid}}" + group: "{{sbs_group_gid}}" + mode: "0664" + state: "touch" + modification_time: "preserve" + access_time: "preserve" + with_items: + - "sbs.log" + - "sbs.debug.log" + +# Create dummy file in certs dir to pacify container pre-init script +# https://github.com/SURFscz/SBS/pull/2312 +- name: "Touch file in {{ sbs.cert_dir }}" + ansible.builtin.copy: + content: "" + dest: "{{sbs.cert_dir}}/dummy" + +- name: "Create SBS config files" + template: + src: "{{item.name}}.j2" + dest: "{{ sbs.conf_dir }}/{{item.name}}" + owner: "{{sbs_user_uid}}" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { name: "config.yml", mode: "0644" } + - { name: "alembic.ini", mode: "0644" } + - { name: "disclaimer.css", mode: "0644" } + - { name: "sbs-apache.conf", mode: "0644" } + no_log: "{{sbs.ansible_nolog}}" + notify: "Restart sbs containers" + +- name: "Pull sbs image" + community.docker.docker_image_pull: + name: "{{ item }}" + with_items: + - "{{ sbs.client_image }}" + - "{{ sbs.server_image }}" + register: "sbs_image" + +# We need to remove sram-static so it gets repopulated +# with new SBS image static content +- name: "Clean up old containers" + block: + - name: "Stop and remove sbs and sbs-server containers" + community.docker.docker_container: + name: "{{ item }}" + state: "absent" + with_items: + - "sbs-client" + - "sbs-server" + + when: "sbs_image is changed" + +- name: "Run SBS 
migrations" + throttle: 1 + community.docker.docker_container: + name: "sram-sbs-migration" + image: "{{ sbs.server_image }}" + pull: "never" + state: "started" + restart_policy: "no" + detach: false + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + MIGRATIONS_ONLY: "1" + # don't actually run the server + command: "/bin/true" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" + - "{{ sbs.log_dir }}:/opt/sbs/log" + networks: + - name: "loadbalancer" + register: "result" + failed_when: "'container' not in result or result.container.State.ExitCode != 0" + changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" + notify: "Restart sbs containers" + +# Remove the migration container; we can do that with auto_remove, because if we use that, ansible +# will not save the output in result +- name: "Remove migration container" + community.docker.docker_container: + name: "sram-sbs-migration" + state: "absent" + # TODO: fix this by only running this if "sbs_image is changed" + changed_when: false + +- name: "Start sbs client container" + community.docker.docker_container: + name: "sram-sbs-client" + image: "{{ sbs.client_image }}" + pull: "never" + restart_policy: "always" + state: "started" + volumes: + - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + - "{{ sbs.conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" + networks: + - name: "loadbalancer" + labels: + traefik.http.routers.sbsclient.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbsclient.tls: "true" + traefik.enable: "true" + +- name: "Start SBS server container" + community.docker.docker_container: + name: "sram-sbs-server" + image: "{{ sbs.server_image }}" + restart_policy: "always" + state: "started" + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + CONFIG: "/opt/sbs/server/config/config.yml" + REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt" + RUN_MIGRATIONS: "0" + pull: "never" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" + - "{{ sbs.log_dir }}:/opt/sbs/log" + - "/tmp/ci-runner:/tmp/ci-runner" + networks: + - name: "loadbalancer" + # TODO: fix this: this is only for dev + etc_hosts: + oidc-op.scz-vm.net: "172.20.1.24" + host.docker.internal: host-gateway diff --git a/roles/sram-sbs/templates/alembic.ini.j2 b/roles/sram-sbs/templates/alembic.ini.j2 new file mode 100644 index 000000000..9ccd51979 --- /dev/null +++ b/roles/sram-sbs/templates/alembic.ini.j2 @@ -0,0 +1,72 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = {{ sbs.db_connection_migration }} + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = NOTSET +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = DEBUG +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/roles/sram-sbs/templates/config.yml.j2 b/roles/sram-sbs/templates/config.yml.j2 new file mode 100644 index 000000000..7d4c92bf4 --- /dev/null +++ b/roles/sram-sbs/templates/config.yml.j2 @@ -0,0 +1,264 @@ +--- +database: + uri: {{ sbs.db_connection_sbs }} + +redis: +{% if env == 'test2' %} + uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" +{% else %} + uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" +{% endif %} + +# add a per-release suffix here to invalidate sessions on new releases +secret_key: {{ sbs.db_secret }}{{sbs.secret_key_suffix}} +# Must be a base64 encoded key of 128, 192, or 256 bits. +# Generate: base64.b64encode(os.urandom(256 // 8)).decode() +encryption_key: {{ sbs.encryption_key }} + +# Lifetime of session in minutes (one day is 60 * 24) +permanent_session_lifetime: {{ sbs.session_lifetime }} + +logging: + log_to_stdout: {{ sbs.log_to_stdout }} + +# Valid scopes are "READ" and "WRITE" +api_users: +{% for name, user in sbs.api_users.items() %} + - name: "{{ name }}" + password: "{{ user.password }}" + scopes: "[ {{ user.scopes | join(', ') }} ]" +{% endfor %} + +oidc: + client_id: "{{ sbs.oidc_client_id }}" + client_secret: "{{ sbs.oidc_client_secret }}" + audience: "{{ sbs.oidc_jwt_audience }}" + verify_peer: {{ sbs.oidc_verify_peer }} + authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" + token_endpoint: "{{ sbs.oidc_token_endpoint }}" + userinfo_endpoint: "{{ sbs.oidc_userinfo_endpoint }}" + jwks_endpoint: "{{ sbs.oidc_jwks_endpoint }}" + #Note that the paths for these uri's is hardcoded and only domain and port differ per environment + redirect_uri: "{{ sbs.oidc_redirect_uri }}" + continue_eduteams_redirect_uri: "{{ sbs.eduteams_continue_endpoint }}" + continue_eb_redirect_uri: "{{ sbs.eb_continue_endpoint }}" + second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} + totp_token_name: "{{ sbs.totp_token_name }}" + # The service_id in the proxy_authz endpoint when logging into SBS. 
Most likely to equal the oidc.client_id
+  sram_service_entity_id: "{{ sbs.oidc_client_id }}"
+  scopes: {{ sbs.oidc_scopes }}
+
+base_scope: "{{ base_domain }}"
+entitlement_group_namespace: "{{ sbs.urn_namespace }}"
+eppn_scope: "{{ sbs.eppn_scope }}"
+scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension"
+collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co"
+
+{% if env == "prd" %}
+environment_disclaimer: ""
+{% else %}
+environment_disclaimer: "{{ sbs.disclaimer_label }}"
+{% endif %}
+
+# All services in the white list can be requested in the create-restricted-co API
+# The default organisation is a fallback for when the administrator has no schac_home_org
+restricted_co:
+  services_white_list: [ "https://cloud" ]
+  default_organisation: "{{ sbs.restricted_co_default_org }}"
+
+mail:
+  host: {{ sbs.mail_host }}
+  port: {{ sbs.mail_port }}
+  sender_name: {{ sbs.mail_sender_name }}
+  sender_email: {{ sbs.mail_sender_email }}
+  suppress_sending_mails: {{ sbs.suppress_mails }}
+  info_email: {{ sbs.support_email }}
+  beheer_email: {{ sbs.admin_email }}
+  ticket_email: {{ sbs.ticket_email }}
+  eduteams_email: {{ sbs.eduteams_email }}
+  # Do we mail a summary of new Organizations and Services to the beheer_email?
+  audit_trail_notifications_enabled: {{ sbs.audit_trail_notifications_enabled }}
+  account_deletion_notifications_enabled: True
+  send_exceptions: {{ sbs.send_exceptions }}
+  send_js_exceptions: {{ sbs.send_js_exceptions }}
+  send_exceptions_recipients: [ "{{ sbs.exceptions_mail }}" ]
+  environment: "{{ base_domain }}"
+
+manage:
+  enabled: {{ sbs.manage_base_enabled }}
+  # The entity_id of the SRAM RP in Manage for API retrieval, e.g. "sbs.test2.sram.surf.nl"
+  sram_rp_entity_id: "{{ sbs.manage_sram_rp_entity_id }}"
+  base_url: "{{ sbs.manage_base_url }}"
+  user: "{{ sbs.manage_user }}"
+  password: "{{ sbs.manage_password }}"
+  verify_peer: {{ sbs.manage_verify_peer }}
+
+aup:
+  version: 1
+  url_aup_en: "https://edu.nl/6wb63"
+  url_aup_nl: "https://edu.nl/6wb63"
+
+base_url: {{ sbs.base_url }}
+socket_url: {{ sbs.base_url }}
+base_server_url: {{ sbs.base_url }}
+wiki_link: {{ sbs.wiki_link }}
+
+admin_users:
+{% for admin_user in sbs.admin_users %}
+  - uid: "{{ admin_user.uid }}"
+{% endfor %}
+
+organisation_categories:
+  - "HBO"
+  - "MBO"
+  - "UMC"
+  - "University"
+  - "Research"
+  - "SURF"
+
+feature:
+  seed_allowed: {{ sbs.seed_allowed }}
+  api_keys_enabled: {{ sbs.api_keys_enabled }}
+  feedback_enabled: {{ sbs.feedback_enabled }}
+  impersonation_allowed: {{ sbs.impersonation_allowed }}
+  sbs_swagger_enabled: {{ sbs.swagger_enabled }}
+  admin_platform_backdoor_totp: {{ sbs.admin_platform_backdoor_totp }}
+  past_dates_allowed: {{ sbs.past_dates_allowed }}
+  mock_scim_enabled: {{ sbs.mock_scim_enabled }}
+
+metadata:
+  idp_url: "{{sbs.idp_metadata_url}}"
+  parse_at_startup: True
+  # No need for environment specific values
+  scope_override:
+    knaw.nl: "Koninklijke Nederlandse Akademie van Wetenschappen (KNAW)"
+
+platform_admin_notifications:
+  # Do we daily check for CO join_requests and CO requests and send a summary mail to beheer_email?
+  enabled: False
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long before we include open join_requests in the summary
+  outstanding_join_request_days_threshold: 7
+  # How long before we include open CO requests in the summary
+  outstanding_coll_request_days_threshold: 7
+
+user_requests_retention:
+  # Do we daily check for CO join_requests and CO requests and delete approved and denied?
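+  # (evaluated by the daily job at cron_hour_of_day; both thresholds below are in days)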
+  enabled: {{ sbs.notifications_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long before we delete approved / denied join_requests
+  outstanding_join_request_days_threshold: 90
+  # How long before we delete approved / denied CO requests
+  outstanding_coll_request_days_threshold: 90
+
+# The retention config determines how long users may be inactive, how long the reminder email is valid and when we resend the magic link
+retention:
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # how many days of inactivity before a user is suspended
+  # 0 allows for any last_login_date in the past to trigger suspension notification
+  allowed_inactive_period_days: {{ sbs.suspension_inactive_days }}
+  # how many days before suspension do we send a warning
+  # -1 will suspend notified users on second suspension cron
+  reminder_suspend_period_days: {{ sbs.suspension_reminder_days }}
+  # how many days after suspension do we delete the account
+  remove_suspended_users_period_days: 90
+  # how many days before deletion do we send a reminder
+  reminder_expiry_period_days: 7
+  # whether to send a notification of the result of the retention process to the beheer_email
+  admin_notification_mail: {{ sbs.suspension_notify_admin }}
+
+collaboration_expiration:
+  # Do we daily check for CO's that will be deleted because they have been expired?
+  enabled: {{ sbs.notifications_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long after expiration do we actually delete expired collaborations
+  expired_collaborations_days_threshold: 90
+  # How many days before actual expiration do we mail the organisation members
+  expired_warning_mail_days_threshold: 10
+
+collaboration_suspension:
+  # Do we daily check for CO's that will be suspended because of inactivity?
+  enabled: {{ sbs.notifications_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # After how many days of inactivity do we suspend collaborations
+  collaboration_inactivity_days_threshold: 365
+  # How many days before actual suspension do we mail the organisation members
+  inactivity_warning_mail_days_threshold: 10
+  # How many days after suspension do we actually delete the collaboration
+  collaboration_deletion_days_threshold: 90
+
+membership_expiration:
+  # Do we daily check for memberships that will be deleted because they have been expired?
+  enabled: {{ sbs.notifications_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long after expiration do we actually delete expired memberships
+  expired_memberships_days_threshold: 90
+  # How many days before actual expiration do we mail the co admin and member
+  expired_warning_mail_days_threshold: 10
+
+invitation_reminders:
+  # Do we daily check for invitations that need a reminder?
+  enabled: {{ sbs.invitation_reminders_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How many days before expiration of an invitation do we remind the user?
+  invitation_reminders_threshold: 5
+
+invitation_expirations:
+  # Do we daily check for invitations that are expired / accepted and are eligible for deletion?
+  enabled: {{ sbs.invitation_expirations_enabled }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long after expiration of an invitation do we delete the invitation?
+  nbr_days_remove_expired_invitations: 10
+  # How long after expiration of an API created invitation do we delete the invitation?
+  nbr_days_remove_api_expired_invitations: 30
+
+orphan_users:
+  # Do we daily check for users that are orphans so they can be deleted?
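+  # (an "orphan" here is, presumably, a user account no longer attached to any
+  #  collaboration or organisation)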
+  enabled: {{ sbs.delete_orphaned }}
+  cron_hour_of_day: {{ sbs.cron_hour_of_day }}
+  # How long after creation do we delete orphan users
+  delete_days_threshold: 14
+
+open_requests:
+  # Do we check weekly for all open requests?
+  enabled: {{ sbs.open_requests_enabled }}
+  cron_day_of_week: 1
+
+scim_sweep:
+  # Do we enable SCIM sweeps?
+  enabled: {{ sbs.scim_sweep }}
+  # How often do we check whether SCIM sweeps are needed per service
+  cron_minutes_expression: "*/15"
+
+ldap:
+  url: "{{ sbs.ldap_url }}"
+  bind_account: "{{ sbs.ldap_bind_account }}"
+
+# An MFA login in a different flow is valid for X minutes
+mfa_sso_time_in_minutes: {{ sbs.mfa_sso_minutes }}
+
+# Whether to fall back to TOTP MFA
+mfa_fallback_enabled: {{ sbs.mfa_fallback_enabled }}
+
+# Lower-case entity IDs and schac_home values that are allowed to skip MFA.
+# Note that for a login directly into SRAM only schac_home can be used, as the entity_id of the IdP is unknown
+mfa_idp_allowed: {{ sbs.mfa_idp_allowed }}
+
+# Lower-case schac_home organisations / entity IDs where SURFsecureID is used for step-up
+ssid_identity_providers: {{ sbs.ssid_identity_providers }}
+
+ssid_config_folder: saml
+
+pam_web_sso:
+  session_timeout_seconds: 300
+
+rate_limit_totp_guesses_per_30_seconds: 10
+
+# The uids of users that will never be suspended or deleted
+excluded_user_accounts:
+{% for excluded_user in sbs.excluded_users %}
+  - uid: "{{ excluded_user.uid }}"
+{% endfor %}
+
+engine_block:
+  api_token: {{ sbs.engine_block_api_token }}
diff --git a/roles/sram-sbs/templates/disclaimer.css.j2 b/roles/sram-sbs/templates/disclaimer.css.j2
new file mode 100644
index 000000000..7922f5e5b
--- /dev/null
+++ b/roles/sram-sbs/templates/disclaimer.css.j2
@@ -0,0 +1,6 @@
+{% if env != "prd" -%}
+body::after {
+  background: {{ sbs.disclaimer_color }};
+  content: "{{ sbs.disclaimer_label }}";
+}
+{% endif %}
diff --git a/roles/sram-sbs/templates/saml_advanced_settings.json.j2 b/roles/sram-sbs/templates/saml_advanced_settings.json.j2
new file mode 100644
index 000000000..bdde32050
--- /dev/null
+++ b/roles/sram-sbs/templates/saml_advanced_settings.json.j2
@@ -0,0 +1,35 @@
+{
+  "security": {
+    "nameIdEncrypted": false,
+    "authnRequestsSigned": true,
+    "logoutRequestSigned": false,
+    "logoutResponseSigned": false,
+    "signMetadata": false,
+    "wantMessagesSigned": false,
+    "wantAssertionsSigned": true,
+    "wantNameId": true,
+    "wantNameIdEncrypted": false,
+    "wantAttributeStatement": false,
+    "wantAssertionsEncrypted": false,
+    "requestedAuthnContext": ["{{ sbs_ssid_authncontext }}"],
+    "requestedAuthnContextComparison": "minimum",
+    "failOnAuthnContextMismatch": false,
+    "allowSingleLabelDomains": false,
+    "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
+    "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
+    "rejectDeprecatedAlgorithm": true
+  },
+  "contactPerson": {
+    "technical": {
+      "givenName": "{{ mail.admin_name }}",
+      "emailAddress": "{{ mail.admin_address }}"
+    }
+  },
+  "organization": {
+    "en-US": {
+      "name": "{{ org.name }}",
+      "displayname": "{{ org.name }}",
+      "url": "{{ org.url }}"
+    }
+  }
+}
diff --git a/roles/sram-sbs/templates/saml_settings.json.j2 b/roles/sram-sbs/templates/saml_settings.json.j2
new file mode 100644
index 000000000..bb5788e97
--- /dev/null
+++ b/roles/sram-sbs/templates/saml_settings.json.j2
@@ -0,0 +1,22 @@
+{
+  "strict": true,
+  "debug": true,
+  "sp": {
+    "entityId": "{{ sbs_surf_secure_id.sp_entity_id }}",
+    "assertionConsumerService": {
+      "url": "{{ sbs_surf_secure_id.acs_url }}",
+      "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + }, + "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", + "x509cert": "{{ sbs_surf_secure_id.pub | barepem }}", + "privateKey": "{{ sbs_surf_secure_id.priv | barepem }}" + }, + "idp": { + "entityId": "{{ sbs_ssid_entityid }}", + "singleSignOnService": { + "url": "{{ sbs_ssid_sso_endpoint }}", + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + "x509cert": "{{ sbs_surf_secure_id.sa_idp_certificate | barepem }}" + } +} diff --git a/roles/sram-sbs/templates/sbs-apache.conf.j2 b/roles/sram-sbs/templates/sbs-apache.conf.j2 new file mode 100644 index 000000000..f0140a845 --- /dev/null +++ b/roles/sram-sbs/templates/sbs-apache.conf.j2 @@ -0,0 +1,30 @@ +ServerName {{ sbs.base_domain }} +#ErrorLog /proc/self/fd/2 +#CustomLog /proc/self/fd/1 common +DocumentRoot /opt/sbs/client/dist + +Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;" +Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()" + +RewriteEngine On +RewriteCond %{REQUEST_URI} !^/(api|pam-weblogin|flasgger_static|swagger|health|config|info|socket.io) +RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f +RewriteRule ^/(.*)$ /index.html [L] + +ProxyRequests off +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sram-sbs-server:8080/ +ProxyPassReverse / http://sram-sbs-server:8080/ +ProxyPass /socket.io/ ws://sram-sbs-server:8080/socket.io/ +ProxyPassReverse /socket.io/ ws://sram-sbs-server:8080/socket.io/ + + + Header set Cache-Control: "public, max-age=31536000, immutable" + + + Header set Cache-Control: "no-cache, private" + + + + Require all granted + Options -Indexes + diff --git a/roles/sram-sbs/templates/sbs.service.j2 b/roles/sram-sbs/templates/sbs.service.j2 new file mode 100644 index 000000000..2920ddc8d --- /dev/null +++ b/roles/sram-sbs/templates/sbs.service.j2 @@ -0,0 +1,32 @@ +[Unit] +Description=SBS +After=network.target + +[Service] +DynamicUser=true +User=_sram_sbs +Group=_sram_sbs +SupplementaryGroups={{sbs_group}} + +WorkingDirectory={{sbs_git_dir}} +ReadWritePaths={{sbs_log_dir}} +NoNewPrivileges=true +PrivateTmp=true + +Environment="CONFIG=config/config.yml" +Environment="PROFILE=log_to_stdout" +# the python requests module uses the CAs provided by the certifi package by default +# we'll just take the OS-provided CAs, thankyouverymuch +Environment="REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt" + +Type=notify +ExecStart={{sbs_env_dir}}/bin/gunicorn --worker-class eventlet --workers {{sbs_num_workers}} --bind 127.0.0.1:8080 server.__main__:app + +Restart=on-failure +RestartSec=10 + +KillMode=mixed +TimeoutStopSec=5 + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-sbs/vars/main.yml b/roles/sram-sbs/vars/main.yml new file mode 100644 
index 000000000..761942f7b
--- /dev/null
+++ b/roles/sram-sbs/vars/main.yml
@@ -0,0 +1 @@
+current_release_appdir: /opt/openconext
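
Note: saml_settings.json.j2 pipes the SP keypair and the IdP certificate through a custom `barepem` filter that is not part of this diff. A minimal sketch of such a filter plugin, assuming all it needs to do is strip the PEM armor and line breaks so the bare base64 body can be embedded in the JSON, could look like the following; the file path and implementation are illustrative, not the repository's actual filter.

# roles/sram-sbs/filter_plugins/barepem.py -- illustrative sketch only
import re

# Matches the "-----BEGIN ...-----" / "-----END ...-----" armor lines of a PEM blob.
PEM_ARMOR = re.compile(r"-----(BEGIN|END)[A-Z0-9 ]*-----")


def barepem(pem):
    """Return the bare base64 body of a PEM blob, without armor or line breaks."""
    return "".join(PEM_ARMOR.sub("", pem).split())


class FilterModule(object):
    """Expose the filter to Jinja2 templates as `| barepem`."""

    def filters(self):
        return {"barepem": barepem}

With a filter along these lines, `{{ sbs_surf_secure_id.pub | barepem }}` renders the certificate as a single base64 string, which is the form a python3-saml style settings file (the apparent target of saml_settings.json.j2 and saml_advanced_settings.json.j2) expects for `x509cert` and `privateKey`.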