From 46aad25f7c6b5c4bb5f8234270a196dda157bcce Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 19 Mar 2026 16:48:37 +0100 Subject: [PATCH 1/7] WIP --- roles/redis/defaults/main.yml | 11 + roles/redis/handlers/main.yml | 6 + roles/redis/tasks/main.yml | 61 ++++ roles/redis/templates/redis.conf.j2 | 3 + roles/redis/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 159 +++++++++++ roles/sbs/files/yarn.gpg | 243 ++++++++++++++++ roles/sbs/handlers/main.yml | 9 + roles/sbs/tasks/main.yml | 171 ++++++++++++ roles/sbs/templates/alembic.ini.j2 | 72 +++++ roles/sbs/templates/config.yml.j2 | 264 ++++++++++++++++++ roles/sbs/templates/disclaimer.css.j2 | 6 + .../templates/saml_advanced_settings.json.j2 | 35 +++ roles/sbs/templates/saml_settings.json.j2 | 22 ++ roles/sbs/templates/sbs-apache.conf.j2 | 30 ++ roles/sbs/templates/sbs.service.j2 | 32 +++ roles/sbs/vars/main.yml | 1 + 17 files changed, 1126 insertions(+) create mode 100644 roles/redis/defaults/main.yml create mode 100644 roles/redis/handlers/main.yml create mode 100644 roles/redis/tasks/main.yml create mode 100644 roles/redis/templates/redis.conf.j2 create mode 100644 roles/redis/vars/main.yml create mode 100644 roles/sbs/defaults/main.yml create mode 100644 roles/sbs/files/yarn.gpg create mode 100644 roles/sbs/handlers/main.yml create mode 100644 roles/sbs/tasks/main.yml create mode 100644 roles/sbs/templates/alembic.ini.j2 create mode 100644 roles/sbs/templates/config.yml.j2 create mode 100644 roles/sbs/templates/disclaimer.css.j2 create mode 100644 roles/sbs/templates/saml_advanced_settings.json.j2 create mode 100644 roles/sbs/templates/saml_settings.json.j2 create mode 100644 roles/sbs/templates/sbs-apache.conf.j2 create mode 100644 roles/sbs/templates/sbs.service.j2 create mode 100644 roles/sbs/vars/main.yml diff --git a/roles/redis/defaults/main.yml b/roles/redis/defaults/main.yml new file mode 100644 index 000000000..d4eb4b182 --- /dev/null +++ b/roles/redis/defaults/main.yml @@ -0,0 +1,11 @@ +--- 
+redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}" +redis_defaults: + image: "docker.io/library/redis:7" + conf_dir: "{{ current_release_appdir }}/redis" + data_dir: "{{ current_release_appdir }}/redis/data" + user: redis + group: redis + redis_user: default + redis_password: changethispassword + max_memory: 100mb diff --git a/roles/redis/handlers/main.yml b/roles/redis/handlers/main.yml new file mode 100644 index 000000000..5ed78e133 --- /dev/null +++ b/roles/redis/handlers/main.yml @@ -0,0 +1,6 @@ +--- +- name: Restart redis container + community.docker.docker_container: + name: redis + state: started + restart: true diff --git a/roles/redis/tasks/main.yml b/roles/redis/tasks/main.yml new file mode 100644 index 000000000..65e7392ef --- /dev/null +++ b/roles/redis/tasks/main.yml @@ -0,0 +1,61 @@ +--- +- name: "Create redis group" + group: + name: "{{ redis.group }}" + state: "present" + register: "result" + +- name: "Save redis group gid" + set_fact: + redis_group_gid: "{{ result.gid }}" + +- name: "Create redis user" + user: + name: "{{ redis.user }}" + group: "{{ redis.group }}" + comment: "User to run SRAM Redis service" + shell: "/bin/false" + password: "!" 
+ home: "{{ redis.conf_dir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save redis user uid" + set_fact: + redis_user_uid: "{{ result.uid }}" + +- name: "Create directories" + file: + path: "{{item.path}}" + state: "directory" + owner: "{{ redis.user }}" + group: "{{ redis.group }}" + mode: "{{item.mode}}" + with_items: + - { path: "{{redis.conf_dir}}", mode: "0755" } + - { path: "{{redis.data_dir}}", mode: "0755" } + +- name: "Create redis config" + template: + src: "redis.conf.j2" + dest: "{{ redis.conf_dir }}/redis.conf" + owner: "{{ redis.user }}" + group: "{{ redis.group }}" + mode: "0644" + notify: "Restart redis container" + +- name: "Create redis container" + community.docker.docker_container: + name: "redis" + image: "{{ redis.image }}" + restart_policy: "always" + state: "started" + user: "{{ redis_user_uid }}:{{ redis_group_gid }}" + command: | + redis-server /usr/local/etc/redis/redis.conf + volumes: + - "{{ redis.conf_dir }}:/usr/local/etc/redis" + - "{{ redis.data_dir }}:/data" + networks: + - name: loadbalancer diff --git a/roles/redis/templates/redis.conf.j2 b/roles/redis/templates/redis.conf.j2 new file mode 100644 index 000000000..ba231dc58 --- /dev/null +++ b/roles/redis/templates/redis.conf.j2 @@ -0,0 +1,3 @@ +user {{redis.redis_user}} on +@all ~* &* >{{redis.redis_password}} +maxmemory {{ redis.max_memory }} +maxmemory-policy allkeys-lru diff --git a/roles/redis/vars/main.yml b/roles/redis/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/redis/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml new file mode 100644 index 000000000..f21addf5f --- /dev/null +++ b/roles/sbs/defaults/main.yml @@ -0,0 +1,159 @@ +--- +sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" +sbs_defaults: + openidc_timeout: 86400 + sram_conf_dir: "{{ current_release_appdir }}/sram" + + work_dir: "{{ 
sram_conf_dir }}/sbs" + git_dir: "{{ sbs.work_dir }}/sbs" + env_dir: "{{ sbs.work_dir }}/sbs-env" + conf_dir: "{{ sbs.work_dir }}/config" + log_dir: "{{ sbs.work_dir }}/log" + cert_dir: "{{ sbs.work_dir }}/cert" + apache_conf: "{{ sbs.work_dir }}/sbs.conf" + nginx_conf: "{{ sbs.work_dir }}/nginx.conf" + + + db_name: "sbs" + db_user: "sbsrw" + dbbackup_user: "sbs_backupper" + migration_user: "sbs_migrater" + + db_connection: "\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ + ?ssl=true&charset=utf8mb4" + db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, sbs_db_password) }}" + db_connection_migration: "\ + {{ sbs_db_connection | format(sbs_migration_user, sbs_migration_password) }}" + + redis_host: redis + redis_port: 6379 + redis_ssl: false + redis_user: default + + mail_host: "{{ mail.relay_to }}" + mail_port: "{{ mail.relay_port }}" + + user: "sbs" + group: "sbs" + + session_lifetime: 1440 + secret_key_suffix: "" + + oidc_crypto_password: "CHANGEME" + uid_attribute: "sub" + + disclaimer_color: "#a29c13" + disclaimer_label: wsgi + + urn_namespace: "urn:example:sbs" + eppn_scope: "sbs.example.edu" + restricted_co_default_org: "example.org" + + mail_sender_name: "SURF" + mail_sender_email: "no-reply@localhost" + exceptions_mail: "root@localhost" + + support_email: "sram-support@localhost" + admin_email: "sram-beheer@localhost" + ticket_email: "sram-support@surf.nl" + eduteams_email: "eduteams@localhost" + + wiki_link: "https://www.example.org/wiki" + + backend_port: 8080 + num_workers: 2 + + cron_hour_of_day: 4 + seed_allowed: True + api_keys_enabled: True + feedback_enabled: True + audit_trail_notifications_enabled: True + send_exceptions: False + send_js_exceptions: False + second_factor_authentication_required: True + totp_token_name: "SRAM-example" + notifications_enabled: True + invitation_reminders_enabled: True + invitation_expirations_enabled: True + open_requests_enabled: True + scim_sweep: False + impersonation_allowed: True + 
admin_platform_backdoor_totp: True + past_dates_allowed: True + mock_scim_enabled: True + log_to_stdout: True + + delete_orphaned: True + suspension_inactive_days: 365 + suspension_reminder_days: 14 + suspension_notify_admin: False + + oidc_config_url: "http://localhost/.well-known/openid-configuration" + oidc_authz_endpoint: "http://localhost/OIDC/authorization" + oidc_token_endpoint: "http://localhost/OIDC/token" + oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" + oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" + oidc_redirect_uri: "https://sbs.scz-vm.net/api/users/resume-session" + mfa_idp_allowed: false + eduteams_continue_endpoint: "https://localhost/continue" + eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" + oidc_jwt_audience: "https://localhost" + continue_eduteams_redirect_uri: "https://localhost/continue" + oidc_verify_peer: False + oidc_scopes: + - openid + + manage_base_enabled: False + manage_base_url: "https://manage.test2.surfconext.nl" + manage_sram_rp_entity_id: "sbs.test2.sram.surf.nl" + manage_verify_peer: False + + idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " + backup_dir: "{{backup_base}}/sbs" + + swagger_enabled: true + + ssid_identity_providers: [] + surf_secure_id: + environment: "unknown.example.org" + sp_entity_id: "https://sbs.{{base_domain}}" + acs_url: "https://{{base_domain}}/api/users/acs" + sa_gw_environment: "sa-gw.unknown.example.org" + sa_idp_certificate: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + priv: | + -----BEGIN RSA PRIVATE KEY----- + abcde + -----END RSA PRIVATE KEY----- + pub: | + -----BEGIN CERTIFICATE----- + 12345 + -----END CERTIFICATE----- + + ssid_authncontext: "\ + http://{{ sbs.surf_secure_id.environment }}/assurance/sfo-level2" + ssid_entityid: "\ + https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" + ssid_sso_endpoint: "\ + https://{{ sbs.surf_secure_id.sa_gw_environment 
}}/second-factor-only/single-sign-on" + + mfa_sso_minutes: 10 + mfa_fallback_enabled: true + + ldap_url: "ldap://ldap.example.com/dc=example,dc=com" + ldap_bind_account: "cn=admin,dc=entity_id,dc=services,dc=sram-tst,dc=surf,dc=nl" + + csp_style_hashes: + - 'sha256-0+ANsgYUJdh56RK8gGvTF2vnriYqvFHfWqtA8xXa+bA=' + - 'sha256-3SnfHQolDHbZMbDAPmhrZf1keHiXfj/KJyh2phhFAAY=' + - 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=' + - 'sha256-Ng6y+QCkPChG4Q49SIfXB5ToIDcDhITtQNFkDBPpCTw=' + - 'sha256-orBPipbqpMvkNi+Z+m6qEn0XS6ymmAQE6+FwCNs1FbQ=' + - 'sha256-vFt3L2qLqpJmRpcXGbYr2UVSmgSp9VCUzz2lnqWIATw=' + - 'sha256-SU3XCwbQ/8qgzoGOWCYdkwIr3xRrl5rsvdFcpw8NSiE=' # on /new-service-request + - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request + + engine_block_api_token: secret diff --git a/roles/sbs/files/yarn.gpg b/roles/sbs/files/yarn.gpg new file mode 100644 index 000000000..3e9e7d155 --- /dev/null +++ b/roles/sbs/files/yarn.gpg @@ -0,0 +1,243 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- +Version: GnuPG v1 + +mQINBFf0j5oBEADS6cItqCbf4lOLICohq2aHqM5I1jsz3DC4ddIU5ONbKXP1t0wk +FEUPRzd6m80cTo7Q02Bw7enh4J6HvM5XVBSSGKENP6XAsiOZnY9nkXlcQAPFRnCn +CjEfoOPZ0cBKjn2IpIXXcC+7xh4p1yruBpOsCbT6BuzA+Nm9j4cpRjdRdWSSmdID +TyMZClmYm/NIfCPduYvNZxZXhW3QYeieP7HIonhZSHVu/jauEUyHLVsieUIvAOJI +cXYpwLlrw0yy4flHe1ORJzuA7EZ4eOWCuKf1PgowEnVSS7Qp7lksCuljtfXgWelB +XGJlAMD90mMbsNpQPF8ywQ2wjECM8Q6BGUcQuGMDBtFihobb+ufJxpUOm4uDt0y4 +zaw+MVSi+a56+zvY0VmMGVyJstldPAcUlFYBDsfC9+zpzyrAqRY+qFWOT2tj29R5 +ZNYvUUjEmA/kXPNIwmEr4oj7PVjSTUSpwoKamFFE6Bbha1bzIHpdPIRYc6cEulp3 +dTOWfp+Cniiblp9gwz3HeXOWu7npTTvJBnnyRSVtQgRnZrrtRt3oLZgmj2fpZFCE +g8VcnQOb0iFcIM7VlWL0QR4SOz36/GFyezZkGsMlJwIGjXkqGhcEHYVDpg0nMoq1 +qUvizxv4nKLanZ5jKrV2J8V09PbL+BERIi6QSeXhXQIui/HfV5wHXC6DywARAQAB +tBxZYXJuIFBhY2thZ2luZyA8eWFybkBkYW4uY3g+iQI5BBMBCAAjBQJX9I+aAhsD +BwsJCAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQFkawG4blAxB52Q/9FcyGIEK2 +QamDhookuoUGGYjIeN+huQPWmc6mLPEKS2Vahk5jnJKVtAFiaqINiUtt/1jZuhF2 
+bVGITvZK79kM6lg42xQcnhypzQPgkN7GQ/ApYqeKqCh1wV43KzT/CsJ9TrI0SC34 +qYHTEXXUprAuwQitgAJNi5QMdMtauCmpK+Xtl/72aetvL8jMFElOobeGwKgfLo9+ +We2EkKhSwyiy3W5TYI1UlV+evyyT+N0pmhRUSH6sJpzDnVYYPbCWa2b+0D/PHjXi +edKcely/NvqyVGoWZ+j41wkp5Q0wK2ybURS1ajfaKt0OcMhRf9XCfeXAQvU98mEk +FlfPaq0CXsjOy8eJXDeoc1dwxjDi2YbfHel0CafjrNp6qIFG9v3JxPUU19hG9lxD +Iv7VXftvMpjJCo/J4Qk+MOv7KsabgXg1iZHmllyyH3TY4AA4VA+mlceiiOHdXbKk +Q3BfS1jdXPV+2kBfqM4oWANArlrFTqtop8PPsDNqh/6SrVsthr7WTvC5q5h/Lmxy +Krm4Laf7JJMvdisfAsBbGZcR0Xv/Vw9cf2OIEzeOWbj5xul0kHT1vHhVNrBNanfe +t79RTDGESPbqz+bTS7olHWctl6TlwxA0/qKlI/PzXfOg63Nqy15woq9buca+uTcS +ccYO5au+g4Z70IEeQHsq5SC56qDR5/FvYyu5Ag0EV/SPmgEQANDSEMBKp6ER86y+ +udfKdSLP9gOv6hPsAgCHhcvBsks+ixeX9U9KkK7vj/1q6wodKf9oEbbdykHgIIB1 +lzY1l7u7/biAtQhTjdEZPh/dt3vjogrJblUEC0rt+fZe325ociocS4Bt9I75Ttkd +nWgkE4uOBJsSllpUbqfLBfYR58zz2Rz1pkBqRTkmJFetVNYErYi2tWbeJ59GjUN7 +w1K3GhxqbMbgx4dF5+rjGs+KI9k6jkGeeQHqhDk+FU70oLVLuH2Dmi9IFjklKmGa +3BU7VpNxvDwdoV7ttRYEBcBnPOmL24Sn4Xhe2MDCqgJwwyohd9rk8neV7GtavVea +Tv6bnzi1iJRgDld51HFWG8X+y55i5cYWaiXHdHOAG1+t35QUrczm9+sgkiKSk1II +TlEFsfwRl16NTCMGzjP5kGCm/W+yyyvBMw7CkENQcd23fMsdaQ/2UNYJau2PoRH/ +m+IoRehIcmE0npKeLVTDeZNCzpmfY18T542ibK49kdjZiK6G/VyBhIbWEFVu5Ll9 ++8GbcO9ucYaaeWkFS8Hg0FZafMk59VxKiICKLZ5he/C4f0UssXdyRYU6C5BH8UTC +QLg0z8mSSL+Wb2iFVPrn39Do7Zm8ry6LBCmfCf3pI99Q/1VaLDauorooJV3rQ5kC +JEiAeqQtLOvyoXIex1VbzlRUXmElABEBAAGJAh8EGAEIAAkFAlf0j5oCGwwACgkQ +FkawG4blAxAUUQ//afD0KLHjClHsA/dFiW+5qVzI8kPMHwO1QcUjeXrB6I3SluOT +rLSPhOsoS72yAaU9hFuq8g9ecmFrl3Skp/U4DHZXioEmozyZRp7eVsaHTewlfaOb +6g7+v52ktYdomcp3BM5v/pPZCnB5rLrH2KaUWbpY6V6tqtCHbF7zftDqcBENJDXf +hiCqS19J08GZFjDEqGDrEj3YEmEXZMN7PcXEISPIz6NYI6rw4yVH8AXfQW6vpPzm +ycHwI0QsVW2NQdcZ6zZt+phm6shNUbN2iDdg3BJICmIvQf8qhO3bOh0Bwc11FLHu +MKuGVxnWN82HyIsuUB7WDLBHEOtg61Zf1nAF1PQK52YuQz3EWI4LL9OqVqfSTY1J +jqIfj+u1PY2UHrxZfxlz1M8pXb1grozjKQ5aNqBKRrcMZNx71itR5rv18qGjGR2i +Sciu/xah7zAroEQrx72IjYt03tbk/007CvUlUqFIFB8kY1bbfX8JAA+TxelUniUR +2CY8eom5HnaPpKE3kGXZ0jWkudbWb7uuWcW1FE/bO+VtexpBL3SoXmwbVMGnJIEi 
+Uvy8m6ez0kzLXzJ/4K4b8bDO4NjFX2ocKdzLA89Z95KcZUxEG0O7kaDCu0x3BEge +uArJLecD5je2/2HXAdvkOAOUi6Gc/LiJrtInc0vUFsdqWCUK5Ao/MKvdMFW5Ag0E +V/SP2AEQALRcYv/hiv1n3VYuJbFnEfMkGwkdBYLGo3hiHKY8xrsFVePl9SkL8aqd +C310KUFNI42gGY/lz54RUHOqfMszTdafFrmwU18ECWGo4oG9qEutIKG7fkxcvk2M +tgsOMZFJqVDS1a9I4QTIkv1ellLBhVub9S7vhe/0jDjXs9IyOBpYQrpCXAm6SypC +fpqkDJ4qt/yFheATcm3s8ZVTsk2hiz2jnbqfvpte3hr3XArDjZXr3mGAp3YY9JFT +zVBOhyhT/92e6tURz8a/+IrMJzhSyIDel9L+2sHHo9E+fA3/h3lg2mo6EZmRTuvE +v9GXf5xeP5lSCDwS6YBXevJ8OSPlocC8Qm8ziww6dy/23XTxPg4YTkdf42i7VOpS +pa7EvBGne8YrmUzfbrxyAArK05lo56ZWb9ROgTnqM62wfvrCbEqSHidN3WQQEhMH +N7vtXeDPhAd8vaDhYBk4A/yWXIwgIbMczYf7Pl7oY3bXlQHb0KW/y7N3OZCr5mPW +94VLLH/v+T5R4DXaqTWeWtDGXLih7uXrG9vdlyrULEW+FDSpexKFUQe83a+Vkp6x +GX7FdMC9tNKYnPeRYqPF9UQEJg+MSbfkHSAJgky+bbacz+eqacLXMNCEk2LXFV1B +66u2EvSkGZiH7+6BNOar84I3qJrU7LBD7TmKBDHtnRr9JXrAxee3ABEBAAGJBEQE +GAEIAA8FAlf0j9gCGwIFCQHhM4ACKQkQFkawG4blAxDBXSAEGQEIAAYFAlf0j9gA +CgkQ0QH3iZ1B88PaoA//VuGdF5sjxRIOAOYqXypOD9/Kd7lYyxmtCwnvKdM7f8O5 +iD8oR2Pk1RhYHjpkfMRVjMkaLfxIRXfGQsWfKN2Zsa4zmTuNy7H6X26XW3rkFWpm +dECz1siGRvcpL6NvwLPIPQe7tST72q03u1H7bcyLGk0sTppgMoBND7yuaBTBZkAO +WizR+13x7FV+Y2j430Ft/DOe/NTc9dAlp6WmF5baOZClULfFzCTf9OcS2+bo68oP +gwWwnciJHSSLm6WRjsgoDxo5f3xBJs0ELKCr4jMwpSOTYqbDgEYOQTmHKkX8ZeQA +7mokc9guA0WK+DiGZis85lU95mneyJ2RuYcz6/VDwvT84ooe1swVkC2palDqBMwg +jZSTzbcUVqZRRnSDCe9jtpvF48WK4ZRiqtGO6Avzg1ZwMmWSr0zHQrLrUMTq/62W +KxLyj2oPxgptRg589hIwXVxJRWQjFijvK/xSjRMLgg73aNTq6Ojh98iyKAQ3HfzW +6iXBLLuGfvxflFednUSdWorr38MspcFvjFBOly+NDSjPHamNQ2h19iHLrYT7t4ve +nU9PvC+ORvXGxTN8mQR9btSdienQ8bBuU/mg/c417w6WbY7tkkqHqUuQC9LoaVdC +QFeE/SKGNe+wWN/EKi0QhXR9+UgWA41Gddi83Bk5deuTwbUeYkMDeUlOq3yyemcG +VxAA0PSktXnJgUj63+cdXu7ustVqzMjVJySCKSBtwJOge5aayonCNxz7KwoPO34m +Gdr9P4iJfc9kjawNV79aQ5aUH9uU2qFlbZOdO8pHOTjy4E+J0wbJb3VtzCJc1Eaa +83kZLFtJ45Fv2WQQ2Nv3Fo+yqAtkOkaBZv9Yq0UTaDkSYE9MMzHDVFx11TT21NZD +xu2QiIiqBcZfqJtIFHN5jONjwPG08xLAQKfUNROzclZ1h4XYUT+TWouopmpNeay5 +JSNcp5LsC2Rn0jSFuZGPJ1rBwB9vSFVA/GvOj8qEdfhjN3XbqPLVdOeChKuhlK0/ 
+sOLZZG91SHmT5SjP2zM6QKKSwNgHX4xZt4uugSZiY13+XqnrOGO9zRH8uumhsQmI +eFEdT27fsXTDTkWPI2zlHTltQjH1iebqqM9gfa2KUt671WyoL1yLhWrgePvDE+He +r002OslvvW6aAIIBki3FntPDqdIH89EEB4UEGqiA1eIZ6hGaQfinC7/IOkkm/mEa +qdeoI6NRS521/yf7i34NNj3IaL+rZQFbVWdbTEzAPtAs+bMJOHQXSGZeUUFrEQ/J +ael6aNg7mlr7cacmDwZWYLoCfY4w9GW6JHi6i63np8EA34CXecfor7cAX4XfaokB +XjyEkrnfV6OWYS7f01JJOcqYANhndxz1Ph8bxoRPelf5q+W5Ag0EWBU7dwEQAL1p +wH4prFMFMNV7MJPAwEug0Mxf3OsTBtCBnBYNvgFB+SFwKQLyDXUujuGQudjqQPCz +/09MOJPwGCOi0uA0BQScJ5JAfOq33qXi1iXCj9akeCfZXCOWtG3Izc3ofS6uee7K +fWUF1hNyA3PUwpRtM2pll+sQEO3y/EN7xYGUOM0mlCawrYGtxSNMlWBlMk/y5HK9 +upz+iHwUaEJ4PjV+P4YmDq0PnPvXE4qhTIvxx0kO5oZF0tAJCoTg1HE7o99/xq9Z +rejDR1JJj6btNw1YFQsRDLxRZv4rL9He10lmLhiQE8QN7zOWzyJbRP++tWY2d2zE +yFzvsOsGPbBqLDNkbb9d8Bfvp+udG13sHAEtRzI2UWe5SEdVHobAgu5l+m10WlsN +TG/L0gJe1eD1bwceWlnSrbqw+y+pam9YKWqdu18ETN6CeAbNo4w7honRkcRdZyoG +p9zZf3o1bGBBMla6RbLuJBoRDOy2Ql7B+Z87N0td6KlHI6X8fNbatbtsXR7qLUBP +5oRb6nXX4+DnTMDbvFpE2zxnkg+C354Tw5ysyHhM6abB2+zCXcZ3holeyxC+BUrO +gGPyLH/s01mg2zmttwC1UbkaGkQ6SwCoQoFEVq9Dp96B6PgZxhEw0GMrKRw53LoX +4rZif9Exv6qUFsGY8U9daEdDPF5UHYe7t/nPpfW3ABEBAAGJBD4EGAEIAAkFAlgV +O3cCGwICKQkQFkawG4blAxDBXSAEGQEIAAYFAlgVO3cACgkQRsITDf0kl/VynQ/+ +P3Vksu4fno26vA7ml9bzV3mu/X/gzU1HqySqYv9Zwzk2o512Z4QkoT/8lRepIG7v +AFRQzPn56Pz/vpMfiMDaf6thxs8wpv4y3m+rcQIQKO4sN3wwFPPbvM8wGoY6fGav +IkLKKIXy1BpzRGltGduf0c29+ycvzccQpyuTrZk4Zl73kLyBS8fCt+MZWejMMolD +uuLJiHbXci6+Pdi3ImabyStbNnJYmSyruNHcLHlgIbyugTiAcdTy0Bi/z8MfeYwj +VAwEkX4b2NwtuweYLzupBOTv0SqYCmBduZObkS5LHMZ+5Yh9Hfrd04uMdO5cIiy0 +AsGehTRC3Xyaea7Qk993rNcGEzX7LNB1GB2BXSq9FYPb+q0ewf8k8Lr9E0WG0dvD +OaJSkSGedgdA1QzvTgpAAkVWsXlksShVf4NVskxNUGDRaPLeRB+IV/5jO+kRsFuO +g5Tlkn6cgu1+Bn5gIfv0ny9K7TeC697gRQIcK8db1t8XidgSKbRmsSYEaRCy3c9x +w2/N7DLU/Js3gV8FUd7cZpaYN+k/erMdyfqLA7oFd+HLbA5Du/971yF8/6Bof8zp +jB9+QPRIARpcROEcQXz09dtl8wW8M0r09xpna+0Jk6JxF+stD97+hzikQXIxUtCX +j35ps9USSxv1cuz0MaFdWGW13OugtN4bQ2DNgelbTDUEKg//YTbBl9oGYQxHv9S5 +qvZVNvV3DuI18E5VW5ddyo/JfW24+Tukli/ZjPQYnMOP86nnIqo/LPGb4nV1uWL4 
+KhmOCbH7t43+TkAwdwoxLjYP7iOqQp9VRPFjomUfvtmLjHp4r3cVEt5QeJEZLiSC +zSKMjPKqRMo5nNs3Et+/FyWCMRYdSggwhBfkbKKo44H9pmL3bTLqyir7EJAcArla +zjKMyZqRsK3gZfQgoASN5xAhemVWHnnecVSAqrOW599EBkc7Kf6lXjTVHtHN02vX +YYRZ16zrEjrfwb23LR+lAxSfWxLDovKLBg2SPbpduEv1GxyEFgF7v9fco4aQbuh/ +fOGvA8nuXkC5nI6ukw4c4zwmJ5+SNQthFUYKWLd4hR4qrCoJkMEWZmsCRtqxjVCJ +/i9ygRJHOGAWaam7bS+U7pdmq2mgF+qTxb2vX6mSzI3q3M7drGUA3EdaZo1hPA5u +kWi7tMCGqPQmtUFRnUvHPzCDuXLYT8lRxhTxDi3T5MXdIUlAUTcNpwG8Ill0xkGc +pMlh0D5p44GEdMFfJiXw6AUETHcqC2qZr2rP9kpzvVlapIrsPRg/DU+s70YnccI3 +iMCVm4/WrghFeK232zkjiwRVOm+IEWBlDFrm4MMjfguUeneYbK9WhqJnss9nc4QK +Vhzuyn3GTtg1w/T6CaYVXBjcHFmJBEQEGAEIAA8CGwIFAlokZSMFCQQWmKMCKcFd +IAQZAQgABgUCWBU7dwAKCRBGwhMN/SSX9XKdD/4/dWSy7h+ejbq8DuaX1vNXea79 +f+DNTUerJKpi/1nDOTajnXZnhCShP/yVF6kgbu8AVFDM+fno/P++kx+IwNp/q2HG +zzCm/jLeb6txAhAo7iw3fDAU89u8zzAahjp8Zq8iQsoohfLUGnNEaW0Z25/Rzb37 +Jy/NxxCnK5OtmThmXveQvIFLx8K34xlZ6MwyiUO64smIdtdyLr492LciZpvJK1s2 +cliZLKu40dwseWAhvK6BOIBx1PLQGL/Pwx95jCNUDASRfhvY3C27B5gvO6kE5O/R +KpgKYF25k5uRLkscxn7liH0d+t3Ti4x07lwiLLQCwZ6FNELdfJp5rtCT33es1wYT +Nfss0HUYHYFdKr0Vg9v6rR7B/yTwuv0TRYbR28M5olKRIZ52B0DVDO9OCkACRVax +eWSxKFV/g1WyTE1QYNFo8t5EH4hX/mM76RGwW46DlOWSfpyC7X4GfmAh+/SfL0rt +N4Lr3uBFAhwrx1vW3xeJ2BIptGaxJgRpELLdz3HDb83sMtT8mzeBXwVR3txmlpg3 +6T96sx3J+osDugV34ctsDkO7/3vXIXz/oGh/zOmMH35A9EgBGlxE4RxBfPT122Xz +BbwzSvT3Gmdr7QmTonEX6y0P3v6HOKRBcjFS0JePfmmz1RJLG/Vy7PQxoV1YZbXc +66C03htDYM2B6VtMNQkQFkawG4blAxCiVRAAhq/1L5YlsmItiC6MROtPP+lfAWRm +MSkoIuAtzkV/orqPetwWzjYLgApOvVXBuf9FdJ5vAx1IXG3mDx6mQQWkr4t9onwC +UuQ7lE29qmvCHB3FpKVJPKiGC6xK38t5dGAJtbUMZBQb1vDuQ7new8dVLzBSH1VZ +7gx9AT+WEptWznb1US1AbejO0uT8jsVc/McK4R3LQmVy9+hbTYZFz1zCImuv9SCN +ZPSdLpDe41QxcMfKiW7XU4rshJULKd4HYG92KjeJU80zgCyppOm85ENiMz91tPT7 ++A4O7XMlOaJEH8t/2SZGBE/dmHjSKcWIpJYrIZKXTrNv7rSQGvweNG5alvCAvnrL +J2cRpU1Rziw7auEU1YiSse+hQ1ZBIzWhPMunIdnkL/BJunBTVE7hPMMG7alOLy5Z +0ikNytVewasZlm/dj5tEsfvF7tisVTZWVjWCvEMTP5fecNMEAwbZdBDyQBAN00y7 +xp4Pwc/kPLuaqESyTTt8jGek/pe7/+6fu0GQmR2gZKGagAxeZEvXWrxSJp/q81XS 
+QGcO6QYMff7VexY3ncdjSVLro+Z3ZtYt6aVIGAEEA5UE341yCGIeN+nr27CXD4fH +F28aPh+AJzYh+uVjQhHbL8agwcyCMLgU88u1U0tT5Qtjwnw+w+3UNhROvn495REp +eEwD60iVeiuF5FW5Ag0EWbWWowEQALCiEk5Ic40W7/v5hqYNjrRlxTE/1axOhhzt +8eCB7eOeNOMQKwabYxqBceNmol/guzlnFqLtbaA6yZQkzz/K3eNwWQg7CfXO3+p/ +dN0HtktPfdCk+kY/t7StKRjINW6S9xk9KshiukmdiDq8JKS0HgxqphBB3tDjmo6/ +RiaOEFMoUlXKSU+BYYpBpLKg53P8F/8nIsK2aZJyk8XuBd0UXKI+N1gfCfzoDWnY +Hs73LQKcjrTaZQauT81J7+TeWoLI28vkVxyjvTXAyjSBnhxTYfwUNGSoawEXyJ1u +KCwhIpklxcCMI9Hykg7sKNsvmJ4uNcRJ7cSRfb0g5DR9dLhR+eEvFd+o4PblKk16 +AI48N8Zg1dLlJuV2cAtl0oBPk+tnbZukvkS5n1IzTSmiiPIXvK2t506VtfFEw4iZ +rJWf2Q9//TszBM3r1FPATLH7EAeG5P8RV+ri7L7NvzP6ZQClRDUsxeimCSe8v/t0 +OpheCVMlM9TpVcKGMw8ig/WEodoLOP4iqBs4BKR7fuydjDqbU0k/sdJTltp7IIdK +1e49POIQ7pt+SUrsq/HnPW4woLC1WjouBWyr2M7/a0SldPidZ2BUAK7O9oXosidZ +MJT7dBp3eHrspY4bdkSxsd0nshj0ndtqNktxkrSFRkoFpMz0J/M3Q93CjdHuTLpT +HQEWjm/7ABEBAAGJBEQEGAEIAA8FAlm1lqMCGwIFCQJ2LQACKQkQFkawG4blAxDB +XSAEGQEIAAYFAlm1lqMACgkQ4HTRbrb/TeMpDQ//eOIsCWY2gYOGACw42JzMVvuT +DrgRT4hMhgHCGeKzn1wFL1EsbSQV4Z6pYvnNayuEakgIz14wf4UFs5u1ehfBwatm +akSQJn32ANcAvI0INAkLEoqqy81mROjMc9FFrOkdqjcN7yN0BzH9jNYL/gsvmOOw +Ou+dIH3C1Lgei844ZR1BZK1900mohuRwcji0sdROMcrKrGjqd4yb6f7yl0wbdAxA +3IHT3TFGczC7Y41P2OEpaJeVIZZgxkgQsJ14qK/QGpdKvmZAQpjHBipeO/H+qxyO +T5Y+f15VLWGOOVL090+ZdtF7h3m4X2+L7xWsFIgdOprfO60gq3e79YFfgNBYU5BG +tJGFGlJ0sGtnpzx5QCRka0j/1E5lIu00sW3WfGItFd48hW6wHCloyoi7pBR7xqSE +oU/U5o7+nC8wHFrDYyqcyO9Q3mZDw4LvlgnyMOM+qLv/fNgO9USE4T30eSvc0t/5 +p1hCKNvyxHFghdRSJqn70bm6MQY+kd6+B/k62Oy8eCwRt4PR+LQEIPnxN7xGuNpV +O1oMyhhO41osYruMrodzw81icBRKYFlSuDOQ5jlcSajc6TvF22y+VXy7nx1q/CN4 +tzB/ryUASU+vXS8/QNM6qI/QbbgBy7VtHqDbs2KHp4cP0j9KYQzMrKwtRwfHqVrw +FLkCp61EHwSlPsEFiglpMg/8DQ92O4beY0n7eSrilwEdJg89IeepTBm1QYiLM33q +WLR9CABYAIiDG7qxviHozVfX6kUwbkntVpyHAXSbWrM3kD6jPs3u/dimLKVyd29A +VrBSn9FC04EjtDWsj1KB7HrFN4oo9o0JLSnXeJb8FnPf3MitaKltvj/kZhegozIs ++zvpzuri0LvoB4fNA0T4eAmxkGkZBB+mjNCrUHIakyPZVzWGL0QGsfK1Q9jvw0OE +rqHJYX8A1wLre/HkBne+e5ezS6Mc7kFW33Y1arfbHFNAe12juPsOxqK76qNilUbQ 
+pPtNvWP3FTpbkAdodMLq/gQ+M5yHwPe8SkpZ8wYCfcwEemz/P+4QhQB8tbYbpcPx +J+aQjVjcHpsLdrlSY3JL/gqockR7+97GrCzqXbgvsqiWr16Zyn6mxYWEHn9HXMh3 +b+2IYKFFXHffbIBq/mfibDnZtQBrZpn2uyh6F2ZuOsZh0LTD7RL53KV3fi90nS00 +Gs1kbMkPycL1JLqvYQDpllE2oZ1dKDYkwivGyDQhRNfERL6JkjyiSxfZ2c84r2HP +gnJTi/WBplloQkM+2NfXrBo6kLHSC6aBndRKk2UmUhrUluGcQUyfzYRFH5kVueIY +fDaBPus9gb+sjnViFRpqVjefwlXSJEDHWP3Cl2cuo2mJjeDghj400U6pjSUW3bIC +/PK5Ag0EXCxEEQEQAKVjsdljwPDGO+48879LDa1d7GEu/Jm9HRK6INCQiSiS/0mH +keKa6t4DRgCY2ID9lFiegx2Er+sIgL0chs16XJrFO21ukw+bkBdm2HYUKSsUFmr/ +bms8DkmAM699vRYVUAzO9eXG/g8lVrAzlb3RT7eGHYKd15DT5KxXDQB+T+mWE9qD +5RJwEyPjSU+4WjYF+Rr9gbSuAt5UySUb9jTR5HRNj9wtb4YutfP9jbfqy8esQVG9 +R/hpWKb2laxvn8Qc2Xj93qNIkBt/SILfx9WDJl0wNUmu+zUwpiC2wrLFTgNOpq7g +9wRPtg5mi8MXExWwSF2DlD54yxOOAvdVACJFBXEcstQ3SWg8gxljG8eLMpDjwoIB +ax3DZwiYZjkjJPeydSulh8vKoFBCQkf2PcImXdOk2HqOV1L7FROM6fKydeSLJbx1 +7SNjVdQnq1OsyqSO0catAFNptMHBsN+tiCI29gpGegaoumV9cnND69aYvyPBgvdt +mzPChjSmc6rzW1yXCJDm2qzwm/BcwJNXW5B3EUPxc0qSWste9fUna0G4l/WMuaIz +VkuTgXf1/r9HeQbjtxAztxH0d0VgdHAWPDkUYmztcZ4sd0PWkVa18qSrOvyhI96g +CzdvMRLX17m1kPvP5PlPulvqizjDs8BScqeSzGgSbbQVm5Tx4w2uF4/n3FBnABEB +AAGJBEQEGAECAA8FAlwsRBECGwIFCQIKEgACKQkQFkawG4blAxDBXSAEGQECAAYF +AlwsRBEACgkQI+cWZ4i2Ph6B0g//cPis3v2M6XvAbVoM3GIMXnsVj1WAHuwA/ja7 +UfZJ9+kV/PiMLkAbW0fBj0/y0O3Ry12VVQGXhC+Vo4j6C8qwFP4OXa6EsxHXuvWM +IztBaX1Kav613aXBtxp6tTrud0FFUh4sDc1RREb3tMr6y5cvFJgnrdWcX1gsl6OD +cgWBGNc6ZX7H7j48hMR6KmNeZocW7p8W+BgDQJqXYwVNL15qOHzVAh0dWsFLE9gw +BTmDCY03x9arxSNDGCXyxt6E77LbNVIoSRlEbkvi6j33nEbuERICYl6CltXQCyiV +KjheJcLMjbgv5+bLCv2zfeJ/WyOmOGKpHRu+lBV1GvliRxUblVlmjWPhYPBZXGyj +II16Tqr+ilREcZFW+STccbrVct75JWLbxwlEmix+W1HwSRCR+KHx3Cur4ZPMOBlP +sFilOOsNa7ROUB56t7zv21Ef3BeeaCd9c4kzNGN8d1icEqSXoWWPqgST0LZPtZyq +WZVnWrHChVHfrioxhSnw8O3wY1A2GSahiCSvvjvOeEoJyU21ZMw6AVyHCh6v42oY +adBfGgFwNo5OCMhNxNy/CcUrBSDqyLVTM5QlNsT75Ys7kHHnc+Jk+xx4JpiyNCz5 +LzcPhlwpqnJQcjJdY1hDhK75Ormj/NfCMeZ8g1aVPX4xEq8AMyZYhZ5/lmM+13Rd +v8ZW6FK7HQ/+IAKzntxOjw0MzCXkksKdmIOZ2bLeOVI8aSLaUmoT5CLuoia9g7iF 
+HlYrSY+01riRrAaPtYx0x8onfyVxL9dlW/Fv5+qc1fF5FxdhyIgdqgzm82TnXHu/ +haUxYmUvNrbsmmNl5UTTOf+YQHMccKFdYfZ2rCBtbN2niXG1tuz2+k83pozu4mJ1 +rOOLNAsQoY3yR6OODte1FyOgp7blwDhTIoQb8/UiJ7CMBI3OPrfoXFAnhYoxeRSA +N4UFu9/HIkqfaQgRPCZS1gNerWF6r6yz9AZWUZqjSJssjBqXCtK9bGbTYBZk+pw3 +H9Nd0RJ2WJ9qPqmlmUr1wdqct0ChsJx1xAT86QrssicJ/HFFmF45hlnGkHUBWLaV +Jt8YkLb/DqOIbVbwyCLQtJ80VQLEeupfmu5QNsTpntRYNKf8cr00uc8vSYXYFRxa +5H5oRT1eoFEEjDDvokNnHXfT+Hya44IjYpzaqvAgeDp6sYlOdtWIv/V3s+trxACw +TkRN7zw3lLTbT8PK9szK0fYZ5KHG1/AKH+mbZ6qNc/25PNbAFRtttLGuEIC3HJ12 +IAp2JdjioeD2OnWLu4ZeCT2CKKFsleZPrSyCrn3gyZPmfYvv5h2JbQNO6uweOrZE +NWX5SU43OBoplbuKJZsMP6p6NahuGnIeJLlv509JYAf/HN4ARyvvOpO5Ag0EXDf1 +bwEQAKBByJMoxQ7H6AsQP29qjY8/pfDiNloQDHasUXoOyTfUetam3rY/UWCHFrMD +0jvOHNIqEVJPsSWrxBYf+i4NNECsCSj39JHdVLOkn6pJcRnMzmljS8ojOybYRUTT +KdKlV+jYy6hqAjTvnf/pzZOrNseKyxAo/xETphN2UEBKOZwV5j5YV6VXptt6xn1x +EL1wzahZr6qz/gXn5//mg6aPPUCJt7BPBtC34HGoyHUn4Cx/jSU7zlQLV11VyTyt +/TY69Wgc1k21oS0tm44uw8D+4bIXYewxNq0utt75c75JK5rPKCpIkaSgE3YUPAhM +fpoUxSgo+hrTaocLbQm3/fDfRqYhw9IWrOuWLYEEI5NqS0etq2X+nM2oEXymxUM1 +45dicUv27B1YU5IciRaoA3Bwkl3uyvLhkwBNgJGpBoRsgyWKhlUpdMOSAFPHag0D +HNCKbFTGxZOJ1+BoDsIscK864AodI0YvhMFByWGRwQMszQpK/vg9uUdIMDYTzI0i +nvCrOht4R91z/2VZXHlv4D38UYsVE5P6u7N8T6T4SzERBKSktWhnJmMRJK5FQQwM +zWCnSj9TGMC5+JYeMjRV1pUwpZw8iOlDg0x8LfMQ3XbZ0/bvlPsXOjiYmHAjrLZf +qL0vR5jPyrfVUxF/XHJBBC9SEvvXrEDK+G+V9NmNavUNrhLnABEBAAGJBEQEGAEC +AA8FAlw39W8CGwIFCQH+NIACKQkQFkawG4blAxDBXSAEGQECAAYFAlw39W8ACgkQ +T3dnk2lHW6p0eg/+K2JJu1RbTSLJPFYQhLcxX+5d2unkuNLIy3kArtZuB992E2Fw +00okPGtuPdSyk2ygh4DeYnwmabIWChi7LDp+YnqcI4GfMxNG6RsHs+A/77rLBST3 +BB1sejZppmKCQZDSC2pvYaZBpS80UvftCZ9RFdY+kTC22Btn/5ekiQOfIqhUH9Cy +GWS/YlGciomVIVn1hSPN8l4EpBCDtceRaephvzjQIZT3AxOfSlpwJviYjAOkSX4q +WyIjC5Ke5kfEOldUuBN1JGAm45tKlrz/LD/+VOc2IWpbkOIAVSldUgpRyiIJQAZ8 +0trNxrJI7ncaID8lAa7pBptJiL0KorRjk3c6Y7p830Nwe0J5e5+W1RzN4wlR8+9u +uRyP8Mcwz/Hz2jwMiv38Vk4tAOe4PYNZuDnpjZ28yCpF3UUgvzjarubFAcg2jd8S +auCQFlmOfvT+1qIMSeLmWBOdlzJTUpJRcZqnkEE4WtiMSlxyWVFvUwOmKSGi8CLo 
+GW1Ksh9thQ9zKhvVUiVoKn4Z79HXr4pX6rnp+mweJ2dEZtlqD7HxjVTlCHn9fzCl +t/Nt0h721fJbS587AC/ZMgg5GV+GKu6Mij0sPAowUJVCIwN9uK/GHICZEAoMSngP +8xzKnhU5FD38vwBvsqbKxTtICrv2NuwnQ0WBBQ58w5mv2RCMr2W6iegSKIDjwxAA +hDpCw0dlUOodY4omJB19Ra9zIZO5IGxT2+oksks3uWkT/l+I7FY0+YNtIZnC01Ge +RJxJtuDwQXigYEKn1UEJ7ymBKrAdCEY0OC344AffLx81aOYWbbW7XaO6rZn8nyZu +0oC95dGlQQdWYJBLcTwANx50iQQGkR5a+XF87yVciFm6x5Cf78pzJ5OBvN3qLJzN +4YBftPMKIgbozGm6/3I6DDT0SMeCOhamshoBf7Ksqd6N+XUjRHZr7UwprWDJlhSC +XFF1e6tjlf22NwZ9UH29VswFkepT99tfBFpobjbzfABO0YnAj72WcR2ZKP7oYHf7 +EkhI2ssWQ9PRPTwdOSXZDEH0s4cJqO+ZzRoAPE+3hbHlGukAqZiiHRlNpOvPdO6Q +mgVBRsURs5i+4vylfat59HUtzQWbTF1bnZbMlefttb5CHRJNb3PTuxHR562Uzp9/ +/SZfDhAx7SYgwRF+FANWJsvX+I7CbP4qvOzutvIYTsNchbCxrOl+0PxMxWaYZzVb +ZW45mO0LFUNCFqcnr3Sot5e9n0C0vjKBV9XgICHKKgeHaMwOMirb1MKvvMpJ3+NI +BYZJ6d+LyhFXL0xJXccUnEXsmk2h4SBEEZYIhAk9ntRmzOXhXFLAOS8agWlmvYwh +xeeb76cVOYlpLw1utXV9hbuo+oM109vMs73mpF88g4g= +=oMDY +-----END PGP PUBLIC KEY BLOCK----- diff --git a/roles/sbs/handlers/main.yml b/roles/sbs/handlers/main.yml new file mode 100644 index 000000000..012cba535 --- /dev/null +++ b/roles/sbs/handlers/main.yml @@ -0,0 +1,9 @@ +--- +- name: Restart sbs containers + community.docker.docker_container: + name: "{{ item }}" + state: started + restart: true + loop: + - sbs + - sbs_server diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml new file mode 100644 index 000000000..8a52adc4e --- /dev/null +++ b/roles/sbs/tasks/main.yml @@ -0,0 +1,171 @@ +--- +- name: "Create SBS group" + group: + name: "{{ sbs.group }}" + state: "present" + register: "result" + +- name: "Save SBS group gid" + set_fact: + sbs_group_gid: "{{ result.gid }}" + +- name: "Create SBS user" + user: + name: "{{ sbs.user }}" + group: "{{ sbs.group }}" + comment: "User to run SBS service" + shell: "/bin/false" + password: "!" 
+ home: "{{ sbs.conf_dir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save sbs user uid" + set_fact: + sbs_user_uid: "{{ result.uid }}" + +- name: "Create directories" + file: + path: "{{item.path}}" + state: "directory" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { path: "{{sbs.work_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}", mode: "0755" } + - { path: "{{sbs.conf_dir}}/saml", mode: "0755" } + - { path: "{{sbs.log_dir}}", mode: "0775" } + - { path: "{{sbs.cert_dir}}", mode: "0755" } + +- name: "Fix file permissions" + file: + path: "{{sbs.log_dir}}/{{item}}" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "0664" + state: "touch" + modification_time: "preserve" + access_time: "preserve" + with_items: + - "sbs.log" + - "sbs_debug.log" + +- name: "Copy wildcard backend cert" + copy: + content: "{{wildcard_backend_cert.pub}}" + dest: "{{sbs.cert_dir}}/backend.crt" + owner: "root" + group: "root" + mode: "0644" + notify: "Restart sbs containers" + +- name: "Copy https cert" + copy: + content: "{{https_cert.cert}}" + dest: "{{sbs.cert_dir}}/frontend.crt" + owner: "root" + group: "root" + mode: "0644" + notify: "Restart sbs containers" + +- name: "Install database certificate" + copy: + dest: "{{sbs.db_cert_path}}" + content: "{{ sbs.db_tls_cert }}" + owner: "root" + group: "root" + mode: "0644" + +- name: "Create SBS config files" + template: + src: "{{item.name}}.j2" + dest: "{{ sbs.conf_dir }}/{{item.name}}" + owner: "root" + group: "{{sbs_group_gid}}" + mode: "{{item.mode}}" + with_items: + - { name: "config.yml", mode: "0644" } + - { name: "alembic.ini", mode: "0644" } + - { name: "disclaimer.css", mode: "0644" } + - { name: "sbs-apache.conf", mode: "0644" } + no_log: "{{sram_ansible_nolog}}" + notify: "Restart sbs containers" + +- name: "Run SBS migrations" + throttle: 1 + community.docker.docker_container: + name: "sbs_migration" + image: "{{ sbs.server_image }}" + pull: 
"never" + state: "started" + restart_policy: "no" + detach: false + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + CONFIG: "/opt/sbs/server/config/config.yml" + MIGRATIONS_ONLY: "1" + # don't actually run the server + command: "/bin/true" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.log_dir }}:/opt/sbs/log" + networks: + - name: "{{internal_network}}" + register: "result" + failed_when: "'container' not in result or result.container.State.ExitCode != 0" + changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" + notify: "Restart sbs containers" + +# Remove the migration container; we can't do that with auto_remove, because if we use that, ansible +# will not save the output in result +- name: "Remove migration container" + community.docker.docker_container: + name: "sbs_migration" + state: "absent" + # TODO: fix this by only running this if "sbs_image is changed" + changed_when: false + +- name: "Start sbs container" + community.docker.docker_container: + name: "sbs" + image: "{{ sbs.image }}" + pull: "never" + restart_policy: "always" + state: "started" + env: + RUN_MIGRATIONS: "0" + volumes: + - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + networks: + - name: loadbalancer + labels: + traefik.enable: "true" + traefik.docker.network: "{{traefik_network}}" + traefik.http.routers.sbs.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbs.tls: "true" + +- name: "Start SBS server container" + community.docker.docker_container: + name: "sbs_server" + image: "{{ sbs.server_image }}" + restart_policy: "always" + state: "started" + env: + RUNAS_UID: "{{ sbs_user_uid | string }}" + RUNAS_GID: "{{ sbs_group_gid | string }}" + CONFIG: "/opt/sbs/server/config/config.yml" + REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt" + RUN_MIGRATIONS: "0" + pull: "always" + command: 
"/usr/local/bin/gunicorn --preload --worker-class eventlet --workers 8 --bind 0.0.0.0:8080 server.__main__:app" + volumes: + - "{{ sbs.conf_dir }}:/sbs-config" + - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.log_dir }}:/opt/sbs/log" + - "/tmp/ci-runner:/tmp/ci-runner" + networks: + - name: loadbalander diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sbs/templates/alembic.ini.j2 new file mode 100644 index 000000000..7849e4f89 --- /dev/null +++ b/roles/sbs/templates/alembic.ini.j2 @@ -0,0 +1,72 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = {{ sbs_db_connection_migration }} + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = NOTSET +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = DEBUG +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 new file mode 100644 index 000000000..11f029af7 --- /dev/null +++ b/roles/sbs/templates/config.yml.j2 @@ -0,0 +1,264 @@ +--- +database: + uri: {{ sbs.db_connection_sbs }} + +redis: +{% if environment_name == 'tst2' %} + uri: "redis://{{redis_vhost}}/" +{% else %} + uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" +{% endif %} + +# add a per-release suffix here to invalidate sessions on new releases +secret_key: {{ sbs.db_secret }}{{sbs.secret_key_suffix}} +# Must be a base64 encoded key of 128, 192, or 256 bits. 
+# Generate: base64.b64encode(os.urandom(256 // 8)).decode() +encryption_key: {{ sbs.encryption_key }} + +# Lifetime of session in minutes (one day is 60 * 24) +permanent_session_lifetime: {{ sbs.session_lifetime }} + +logging: + log_to_stdout: {{ sbs.log_to_stdout }} + +# Valid scopes are "READ" and "WRITE" +api_users: +{% for name, user in sbs.api_users.items() %} + - name: "{{ name }}" + password: "{{ user.password }}" + scopes: "[ {{ user.scopes | join(', ') }} ]" +{% endfor %} + +oidc: + client_id: "{{ sbs.client_id }}" + client_secret: "{{ sbs.client_secret }}" + audience: "{{ sbs.oidc_jwt_audience }}" + verify_peer: {{ sbs.oidc_verify_peer }} + authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" + token_endpoint: "{{ sbs.oidc_token_endpoint }}" + userinfo_endpoint: "{{ sbs.oidc_userinfo_endpoint }}" + jwks_endpoint: "{{ sbs.oidc_jwks_endpoint }}" + #Note that the paths for these uri's is hardcoded and only domain and port differ per environment + redirect_uri: "{{ sbs.oidc_redirect_uri }}" + continue_eduteams_redirect_uri: "{{ sbs.eduteams_continue_endpoint }}" + continue_eb_redirect_uri: "{{ sbs.eb_continue_endpoint }}" + second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} + totp_token_name: "{{ sbs.totp_token_name }}" + # The service_id in the proxy_authz endpoint when logging into SBS. 
Most likely to equal the oidc.client_id + sram_service_entity_id: "{{ sbs.client_id }}" + scopes: {{ sbs.oidc_scopes }} + +base_scope: "{{ base_domain }}" +entitlement_group_namespace: "{{ sbs.urn_namespace }}" +eppn_scope: " {{ sbs.eppn_scope }}" +scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" +collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" + +{% if environment_name == "prd" %} +environment_disclaimer: "" +{% else %} +environment_disclaimer: "{{ sbs.disclaimer_label }}" +{% endif %} + +# All services in the white list can be requested in the create-restricted-co API +# The default organisation is a fallback for when the administrator has no schac_home_org +restricted_co: + services_white_list: [ "https://cloud" ] + default_organisation: "{{ sbs.restricted_co_default_org }}" + +mail: + host: {{ sbs.mail_host }} + port: {{ sbs.mail_port }} + sender_name: {{ sbs.mail_sender_name }} + sender_email: {{ sbs.mail_sender_email }} + suppress_sending_mails: False + info_email: {{ sbs.support_email }} + beheer_email: {{ sbs.admin_email }} + ticket_email: {{ sbs.ticket_email }} + eduteams_email: {{ sbs.eduteams_email }} + # Do we mail a summary of new Organizations and Services to the beheer_email? 
+ audit_trail_notifications_enabled: {{ sbs.audit_trail_notifications_enabled }} + account_deletion_notifications_enabled: True + send_exceptions: {{ sbs.send_exceptions }} + send_js_exceptions: {{ sbs.send_js_exceptions }} + send_exceptions_recipients: [ "{{ sbs.exceptions_mail }}" ] + environment: "{{ base_domain }}" + +manage: + enabled: {{ sbs.manage_base_enabled }} + # The entity_id of the SRAM RP in Manage for API retrieval, e.g "sbs.test2.sram.surf.nl" + sram_rp_entity_id: "{{ sbs.manage_sram_rp_entity_id }}" + base_url: "{{ sbs.manage_base_url }}" + user: "{{ sbs.manage_user }}" + password: "{{ sbs.manage_password }}" + verify_peer: {{ sbs.manage_verify_peer }} + +aup: + version: 1 + url_aup_en: "https://edu.nl/6wb63" + url_aup_nl: "https://edu.nl/6wb63" + +base_url: {{ sbs.base_url }} +socket_url: {{ sbs.base_url }} +base_server_url: {{ sbs.base_url }} +wiki_link: {{ sbs.wiki_link }} + +admin_users: +{% for admin_user in sbs.admin_users %} + - uid: "{{ admin_user.uid }}" +{% endfor %} + +organisation_categories: + - "HBO" + - "MBO" + - "UMC" + - "University" + - "Research" + - "SURF" + +feature: + seed_allowed: {{ sbs.seed_allowed }} + api_keys_enabled: {{ sbs.api_keys_enabled }} + feedback_enabled: {{ sbs.feedback_enabled }} + impersonation_allowed: {{ sbs.impersonation_allowed }} + sbs_swagger_enabled: {{ sbs.swagger_enabled }} + admin_platform_backdoor_totp: {{ sbs.admin_platform_backdoor_totp }} + past_dates_allowed: {{ sbs.past_dates_allowed }} + mock_scim_enabled: {{ sbs.mock_scim_enabled }} + +metadata: + idp_url: "{{sbs.idp_metadata_url}}" + parse_at_startup: True + # No need for environment specific values + scope_override: + knaw.nl: "Koninklijke Nederlandse Akademie van Wetenschappen (KNAW)" + +platform_admin_notifications: + # Do we daily check for CO join_requests and CO requests and send a summary mail to beheer_email? 
+ enabled: False + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long before we include open join_requests in the summary + outstanding_join_request_days_threshold: 7 + # How long before we include open CO requests in the summary + outstanding_coll_request_days_threshold: 7 + +user_requests_retention: + # Do we daily check for CO join_requests and CO requests and delete approved and denied? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long before we delete approved / denied join_requests + outstanding_join_request_days_threshold: 90 + # How long before we delete approved / denied CO requests + outstanding_coll_request_days_threshold: 90 + +# The retention config determines how long users may be inactive, how long the reminder email is valid and when do we resent the magic link +retention: + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # how many days of inactivity before a user is suspended + # 0 allows for any last_login_date in the past to trigger suspension notification + allowed_inactive_period_days: {{ sbs.suspension_inactive_days }} + # how many days before suspension do we send a warning + # -1 will suspend notified users on second suspension cron + reminder_suspend_period_days: {{ sbs.suspension_reminder_days }} + # how many days after suspension do we delete the account + remove_suspended_users_period_days: 90 + # how many days before deletion do we send a reminder + reminder_expiry_period_days: 7 + # whether to send a notification of the result of the retention process to the beheer_email + admin_notification_mail: {{ sbs.suspension_notify_admin }} + +collaboration_expiration: + # Do we daily check for CO's that will be deleted because they have been expired? 
+ enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration do we actually delete expired collaborations + expired_collaborations_days_threshold: 90 + # How many days before actual expiration do we mail the organisation members + expired_warning_mail_days_threshold: 10 + +collaboration_suspension: + # Do we daily check for CO's that will be suspended because of inactivity? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # After how many days of inactivity do we suspend collaborations + collaboration_inactivity_days_threshold: 365 + # How many days before actual suspension do we mail the organisation members + inactivity_warning_mail_days_threshold: 10 + # After how many days after suspension do we actually delete the collaboration + collaboration_deletion_days_threshold: 90 + +membership_expiration: + # Do we daily check for memberships that will be deleted because they have been expired? + enabled: {{ sbs.notifications_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration do we actually delete expired memberships + expired_memberships_days_threshold: 90 + # How many days before actual expiration do we mail the co admin and member + expired_warning_mail_days_threshold: 10 + +invitation_reminders: + # Do we daily check for invitations that need a reminder? + enabled: {{ sbs.invitation_reminders_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How many days before expiration of an invitation do we remind the user? + invitation_reminders_threshold: 5 + +invitation_expirations: + # Do we daily check for invitations that are expired / accepted and are eligible for deletion ? + enabled: {{ sbs.invitation_expirations_enabled }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after expiration of an invitation do we delete the invitation? 
+ nbr_days_remove_expired_invitations: 10 + # How long after expiration of an API created invitation do we delete the invitation? + nbr_days_remove_api_expired_invitations: 30 + +orphan_users: + # Do we daily check for users that are orphans soo they can be deleted? + enabled: {{ sbs.delete_orphaned }} + cron_hour_of_day: {{ sbs.cron_hour_of_day }} + # How long after created do we delete orphan users + delete_days_threshold: 14 + +open_requests: + # Do we weekly check for all open requests? + enabled: {{ sbs.open_requests_enabled }} + cron_day_of_week: 1 + +scim_sweep: + # Do we enable scim sweeps? + enabled: {{ sbs.scim_sweep }} + # How often do we check if scim sweeps are needed per service + cron_minutes_expression: "*/15" + +ldap: + url: "{{ sbs.ldap_url }}" + bind_account: "{{ sbs.ldap_bind_account }}" + +# A MFA login in a different flow is valid for X minutes +mfa_sso_time_in_minutes: {{sbs.mfa_sso_minutes}} + +# whether to fall back to TOTP MFA +mfa_fallback_enabled: {{sbs.mfa_fallback_enabled}} + +# Lower case entity ID's and schac_home allowed skipping MFA. 
+# Note that for a login directly into SRAM only schac_home can be used as the entity_idp of the IdP is unknown +mfa_idp_allowed: {{sbs.mfa_idp_allowed}} + +# Lower case schachome organisations / entity ID's where SURFSecure ID is used for step-up +ssid_identity_providers: {{sbs.ssid_identity_providers}} + +ssid_config_folder: saml + +pam_web_sso: + session_timeout_seconds: 300 + +rate_limit_totp_guesses_per_30_seconds: 10 + +# The uid's of user that will never be suspended or deleted +excluded_user_accounts: +{% for excluded_user in sbs.excluded_users %} + - uid: "{{ excluded_user.uid }}" +{% endfor %} + +engine_block: + api_token: {{ sbs.engine_block_api_token }} diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 new file mode 100644 index 000000000..e89bbfce7 --- /dev/null +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -0,0 +1,6 @@ +{% if environment_name!="prd" -%} +body::after { + background: {{ sbs_disclaimer_color }}; + content: "{{ sbs_disclaimer_label }}"; +} +{% endif %} diff --git a/roles/sbs/templates/saml_advanced_settings.json.j2 b/roles/sbs/templates/saml_advanced_settings.json.j2 new file mode 100644 index 000000000..bdde32050 --- /dev/null +++ b/roles/sbs/templates/saml_advanced_settings.json.j2 @@ -0,0 +1,35 @@ +{ + "security": { + "nameIdEncrypted": false, + "authnRequestsSigned": true, + "logoutRequestSigned": false, + "logoutResponseSigned": false, + "signMetadata": false, + "wantMessagesSigned": false, + "wantAssertionsSigned": true, + "wantNameId" : true, + "wantNameIdEncrypted": false, + "wantAttributeStatement": false, + "wantAssertionsEncrypted": false, + "requestedAuthnContext": ["{{sbs_ssid_authncontext}}"], + "requestedAuthnContextComparison": "minimum", + "failOnAuthnContextMismatch": false, + "allowSingleLabelDomains": false, + "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", + "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256", + "rejectDeprecatedAlgorithm": 
true + }, + "contactPerson": { + "technical": { + "givenName": "{{ mail.admin_name }}", + "emailAddress": "{{ mail.admin_address }}" + } + }, + "organization": { + "en-US": { + "name": "{{ org.name }}", + "displayname": "{{ org.name }}", + "url": "{{ org.url }}" + } + } +} diff --git a/roles/sbs/templates/saml_settings.json.j2 b/roles/sbs/templates/saml_settings.json.j2 new file mode 100644 index 000000000..bb5788e97 --- /dev/null +++ b/roles/sbs/templates/saml_settings.json.j2 @@ -0,0 +1,22 @@ +{ + "strict": true, + "debug": true, + "sp": { + "entityId": "{{ sbs_surf_secure_id.sp_entity_id }}", + "assertionConsumerService": { + "url": "{{ sbs_surf_secure_id.acs_url }}", + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" + }, + "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", + "x509cert": "{{ sbs_surf_secure_id.pub | barepem }}", + "privateKey": "{{ sbs_surf_secure_id.priv | barepem }}" + }, + "idp": { + "entityId": "{{ sbs_ssid_entityid }}", + "singleSignOnService": { + "url": "{{ sbs_ssid_sso_endpoint }}", + "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" + }, + "x509cert": "{{ sbs_surf_secure_id.sa_idp_certificate | barepem }}" + } +} diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 new file mode 100644 index 000000000..13752ee56 --- /dev/null +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -0,0 +1,30 @@ +ServerName {{ hostnames.sbs }} +#ErrorLog /proc/self/fd/2 +#CustomLog /proc/self/fd/1 common +DocumentRoot /opt/sbs/client/build + +Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;" +Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), 
execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()" + +RewriteEngine On +RewriteCond %{REQUEST_URI} !^/(api|pam-weblogin|flasgger_static|swagger|health|config|info|socket.io) +RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f +RewriteRule ^/(.*)$ /index.html [L] + +ProxyRequests off +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ +ProxyPassReverse / http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ +ProxyPass /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ +ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ + + + Header set Cache-Control: "public, max-age=31536000, immutable" + + + Header set Cache-Control: "no-cache, private" + + + + Require all granted + Options -Indexes + diff --git a/roles/sbs/templates/sbs.service.j2 b/roles/sbs/templates/sbs.service.j2 new file mode 100644 index 000000000..2920ddc8d --- /dev/null +++ b/roles/sbs/templates/sbs.service.j2 @@ -0,0 +1,32 @@ +[Unit] +Description=SBS +After=network.target + +[Service] +DynamicUser=true +User=_sram_sbs +Group=_sram_sbs +SupplementaryGroups={{sbs_group}} + +WorkingDirectory={{sbs_git_dir}} +ReadWritePaths={{sbs_log_dir}} +NoNewPrivileges=true +PrivateTmp=true + +Environment="CONFIG=config/config.yml" +Environment="PROFILE=log_to_stdout" +# the python requests module uses the CAs provided by the certifi package by default +# we'll just take the OS-provided CAs, thankyouverymuch +Environment="REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt" + +Type=notify 
+ExecStart={{sbs_env_dir}}/bin/gunicorn --worker-class eventlet --workers {{sbs_num_workers}} --bind 127.0.0.1:8080 server.__main__:app + +Restart=on-failure +RestartSec=10 + +KillMode=mixed +TimeoutStopSec=5 + +[Install] +WantedBy=multi-user.target diff --git a/roles/sbs/vars/main.yml b/roles/sbs/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sbs/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext From 794e531f523e582e1148694287c97e65d59177cf Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Wed, 25 Mar 2026 13:59:09 +0100 Subject: [PATCH 2/7] WIP --- roles/sbs/defaults/main.yml | 69 ++++++++----- roles/sbs/handlers/main.yml | 4 +- roles/sbs/tasks/main.yml | 129 +++++++++++++++---------- roles/sbs/templates/alembic.ini.j2 | 2 +- roles/sbs/templates/config.yml.j2 | 6 +- roles/sbs/templates/disclaimer.css.j2 | 6 +- roles/sbs/templates/sbs-apache.conf.j2 | 14 +-- 7 files changed, 139 insertions(+), 91 deletions(-) diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index f21addf5f..7a907df66 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -1,38 +1,49 @@ --- -sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" +sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" + sbs_defaults: + base_domain: "test2.sram.surf.nl" + environment_name: test + ansible_nolog: true + base_url: "https://{{ sbs_defaults.base_domain }}" + server_image: "ghcr.io/surfscz/sram-sbs-server:main" + client_image: "ghcr.io/surfscz/sram-sbs-client:main" + openidc_timeout: 86400 sram_conf_dir: "{{ current_release_appdir }}/sram" - work_dir: "{{ sram_conf_dir }}/sbs" - git_dir: "{{ sbs.work_dir }}/sbs" - env_dir: "{{ sbs.work_dir }}/sbs-env" - conf_dir: "{{ sbs.work_dir }}/config" - log_dir: "{{ sbs.work_dir }}/log" - cert_dir: "{{ sbs.work_dir }}/cert" - apache_conf: "{{ sbs.work_dir }}/sbs.conf" - nginx_conf: "{{ sbs.work_dir }}/nginx.conf" - + work_dir: "{{ 
sbs_defaults.sram_conf_dir }}/sbs" + git_dir: "{{ sbs_defaults.work_dir }}/sbs" + env_dir: "{{ sbs_defaults.work_dir }}/sbs-env" + conf_dir: "{{ sbs_defaults.work_dir }}/config" + log_dir: "{{ sbs_defaults.work_dir }}/log" + cert_dir: "{{ sbs_defaults.work_dir }}/cert" + apache_conf: "{{ sbs_defaults.work_dir }}/sbs_defaults.conf" + nginx_conf: "{{ sbs_defaults.work_dir }}/nginx.conf" db_name: "sbs" db_user: "sbsrw" - dbbackup_user: "sbs_backupper" - migration_user: "sbs_migrater" + # dbbackup_user: "sbs_backupper" + migration_user: "sbsmigrate" db_connection: "\ - mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_db_name }}\ + mysql+mysqldb://%s:%s@{{ mariadb_host }}/{{ sbs_defaults.db_name }}\ ?ssl=true&charset=utf8mb4" - db_connection_sbs: "{{ sbs_db_connection | format(sbs_db_user, sbs_db_password) }}" + db_connection_sbs: "{{ sbs_defaults.db_connection | format(sbs_defaults.db_user, mysql_passwords.sbs) }}" db_connection_migration: "\ - {{ sbs_db_connection | format(sbs_migration_user, sbs_migration_password) }}" + {{ sbs_defaults.db_connection | format(sbs_defaults.migration_user, mysql_passwords.sbsmigrate) }}" + + db_secret: secret + secret_key_suffix: suffix + encryption_key: encryption_key redis_host: redis redis_port: 6379 redis_ssl: false redis_user: default - mail_host: "{{ mail.relay_to }}" - mail_port: "{{ mail.relay_port }}" + mail_host: "host.docker.internal" + mail_port: 25 user: "sbs" group: "sbs" @@ -94,7 +105,7 @@ sbs_defaults: oidc_token_endpoint: "http://localhost/OIDC/token" oidc_userinfo_endpoint: "http://localhost/OIDC/userinfo" oidc_jwks_endpoint: "http://localhost/OIDC/jwks.json" - oidc_redirect_uri: "https://sbs.scz-vm.net/api/users/resume-session" + oidc_redirect_uri: "https://{{sbs_defaults.base_domain}}/api/users/resume-session" mfa_idp_allowed: false eduteams_continue_endpoint: "https://localhost/continue" eb_continue_endpoint: "https://engine.(.*)surfconext.nl(.*)" @@ -105,20 +116,20 @@ sbs_defaults: - openid manage_base_enabled: 
False - manage_base_url: "https://manage.test2.surfconext.nl" - manage_sram_rp_entity_id: "sbs.test2.sram.surf.nl" + manage_base_url: "https://manage.{{base_domain}}" + manage_sram_rp_entity_id: "sbs.{{sbs_defaults.base_domain}}" manage_verify_peer: False idp_metadata_url: "https://metadata.surfconext.nl/signed/2023/edugain-downstream-idp.xml " - backup_dir: "{{backup_base}}/sbs" + # backup_dir: "{{backup_base}}/sbs" swagger_enabled: true ssid_identity_providers: [] surf_secure_id: environment: "unknown.example.org" - sp_entity_id: "https://sbs.{{base_domain}}" - acs_url: "https://{{base_domain}}/api/users/acs" + sp_entity_id: "https://sbs.{{sbs_defaults.base_domain}}" + acs_url: "https://{{sbs_defaults.base_domain}}/api/users/acs" sa_gw_environment: "sa-gw.unknown.example.org" sa_idp_certificate: | -----BEGIN CERTIFICATE----- @@ -134,11 +145,11 @@ sbs_defaults: -----END CERTIFICATE----- ssid_authncontext: "\ - http://{{ sbs.surf_secure_id.environment }}/assurance/sfo-level2" + http://{{ sbs_defaults.surf_secure_id.environment }}/assurance/sfo-level2" ssid_entityid: "\ - https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/metadata" ssid_sso_endpoint: "\ - https://{{ sbs.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" + https://{{ sbs_defaults.surf_secure_id.sa_gw_environment }}/second-factor-only/single-sign-on" mfa_sso_minutes: 10 mfa_fallback_enabled: true @@ -157,3 +168,9 @@ sbs_defaults: - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request engine_block_api_token: secret + + # wildcard_backend_cert: + # pub: | + # -----BEGIN CERTIFICATE----- + # 12345 + # -----END CERTIFICATE----- diff --git a/roles/sbs/handlers/main.yml b/roles/sbs/handlers/main.yml index 012cba535..2d7710d43 100644 --- a/roles/sbs/handlers/main.yml +++ b/roles/sbs/handlers/main.yml @@ -5,5 +5,5 @@ state: started restart: true loop: - - 
sbs - - sbs_server + - sbs-client + - sbs-server diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml index 8a52adc4e..ecc82bff7 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sbs/tasks/main.yml @@ -1,4 +1,9 @@ --- +# - name: "Initialize database" +# throttle: 1 +# import_tasks: "database_init.yml" +# when: "is_dev" + - name: "Create SBS group" group: name: "{{ sbs.group }}" @@ -29,7 +34,7 @@ file: path: "{{item.path}}" state: "directory" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: @@ -42,7 +47,7 @@ - name: "Fix file permissions" file: path: "{{sbs.log_dir}}/{{item}}" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "0664" state: "touch" @@ -50,39 +55,43 @@ access_time: "preserve" with_items: - "sbs.log" - - "sbs_debug.log" - -- name: "Copy wildcard backend cert" - copy: - content: "{{wildcard_backend_cert.pub}}" - dest: "{{sbs.cert_dir}}/backend.crt" - owner: "root" - group: "root" - mode: "0644" - notify: "Restart sbs containers" + - "sbs.debug.log" -- name: "Copy https cert" - copy: - content: "{{https_cert.cert}}" - dest: "{{sbs.cert_dir}}/frontend.crt" - owner: "root" - group: "root" - mode: "0644" - notify: "Restart sbs containers" +# - name: "Copy wildcard backend cert" +# copy: +# content: "{{wildcard_backend_cert.pub}}" +# dest: "{{sbs.cert_dir}}/backend.crt" +# owner: "root" +# group: "root" +# mode: "0644" +# notify: "Restart sbs containers" -- name: "Install database certificate" - copy: - dest: "{{sbs.db_cert_path}}" - content: "{{ sbs.db_tls_cert }}" - owner: "root" - group: "root" - mode: "0644" +# - name: "Copy https cert" +# copy: +# content: "{{https_cert.cert}}" +# dest: "{{sbs.cert_dir}}/frontend.crt" +# owner: "root" +# group: "root" +# mode: "0644" +# notify: "Restart sbs containers" + +# - name: "Install database certificate" +# copy: +# dest: "{{sbs.db_cert_path}}" +# content: "{{ sbs.db_tls_cert }}" +# owner: "root" +# group: "root" +# 
mode: "0644" +- name: "Touch file in {{ sbs.cert_dir }}" + ansible.builtin.file: + path: "{{sbs.cert_dir}}/dummy" + state: file - name: "Create SBS config files" template: src: "{{item.name}}.j2" dest: "{{ sbs.conf_dir }}/{{item.name}}" - owner: "root" + owner: "{{sbs_user_uid}}" group: "{{sbs_group_gid}}" mode: "{{item.mode}}" with_items: @@ -90,13 +99,35 @@ - { name: "alembic.ini", mode: "0644" } - { name: "disclaimer.css", mode: "0644" } - { name: "sbs-apache.conf", mode: "0644" } - no_log: "{{sram_ansible_nolog}}" + no_log: "{{sbs.ansible_nolog}}" notify: "Restart sbs containers" +- name: "Pull sbs image" + community.docker.docker_image_pull: + name: "{{ item }}" + with_items: + - "{{ sbs.client_image }}" + - "{{ sbs.server_image }}" + register: "sbs_image" + +# We need to remove sram-static so it gets repopulated +# with new SBS image static content +- name: "Clean up old containers" + block: + - name: "Stop and remove sbs and sbs-server containers" + community.docker.docker_container: + name: "{{ item }}" + state: "absent" + with_items: + - "sbs-client" + - "sbs-server" + + when: "sbs_image is changed" + - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: - name: "sbs_migration" + name: "sbs-migration" image: "{{ sbs.server_image }}" pull: "never" state: "started" @@ -105,52 +136,49 @@ env: RUNAS_UID: "{{ sbs_user_uid | string }}" RUNAS_GID: "{{ sbs_group_gid | string }}" - CONFIG: "/opt/sbs/server/config/config.yml" MIGRATIONS_ONLY: "1" # don't actually run the server command: "/bin/true" volumes: - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - "{{ sbs.log_dir }}:/opt/sbs/log" networks: - - name: "{{internal_network}}" + - name: "loadbalancer" register: "result" failed_when: "'container' not in result or result.container.State.ExitCode != 0" changed_when: "'[alembic.runtime.migration] Running upgrade' in result.container.Output" notify: "Restart sbs 
containers" -# Remove the migration container; we can't do that with auto_remove, because if we use that, ansible +# Remove the migration container; we can't do that with auto_remove, because if we use that, ansible # will not save the output in result - name: "Remove migration container" community.docker.docker_container: - name: "sbs_migration" + name: "sbs-migration" state: "absent" # TODO: fix this by only running this if "sbs_image is changed" changed_when: false -- name: "Start sbs container" +- name: "Start sbs client container" community.docker.docker_container: - name: "sbs" - image: "{{ sbs.image }}" + name: "sbs-client" + image: "{{ sbs.client_image }}" pull: "never" restart_policy: "always" state: "started" - env: - RUN_MIGRATIONS: "0" volumes: - "{{ sbs.conf_dir }}/sbs-apache.conf:/etc/apache2/sites-enabled/sbs.conf:ro" + - "{{ sbs.conf_dir }}/disclaimer.css:/opt/sbs/client/dist/disclaimer.css:ro" networks: - - name: loadbalancer + - name: "loadbalancer" labels: + traefik.http.routers.sbsclient.rule: "Host(`{{ sbs.base_domain }}`)" + traefik.http.routers.sbsclient.tls: "true" traefik.enable: "true" - traefik.docker.network: "{{traefik_network}}" - traefik.http.routers.sbs.rule: "Host(`{{ sbs.base_domain }}`)" - traefik.http.routers.sbs.tls: "true" - name: "Start SBS server container" community.docker.docker_container: - name: "sbs_server" + name: "sbs-server" image: "{{ sbs.server_image }}" restart_policy: "always" state: "started" @@ -160,12 +188,15 @@ CONFIG: "/opt/sbs/server/config/config.yml" REQUESTS_CA_BUNDLE: "/etc/ssl/certs/ca-certificates.crt" RUN_MIGRATIONS: "0" - pull: "always" - command: "/usr/local/bin/gunicorn --preload --worker-class eventlet --workers 8 --bind 0.0.0.0:8080 server.__main__:app" + pull: "never" volumes: - "{{ sbs.conf_dir }}:/sbs-config" - - "{{ sbs.cert_dir }}:/sbs-config/cert" + - "{{ sbs.cert_dir }}:/sbs-config/cert:ro" - "{{ sbs.log_dir }}:/opt/sbs/log" - "/tmp/ci-runner:/tmp/ci-runner" networks: - - name: loadbalander 
+ - name: "loadbalancer" + # TODO: fix this: this is only for dev + etc_hosts: + oidc-op.scz-vm.net: "172.20.1.24" + host.docker.internal: host-gateway diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sbs/templates/alembic.ini.j2 index 7849e4f89..9ccd51979 100644 --- a/roles/sbs/templates/alembic.ini.j2 +++ b/roles/sbs/templates/alembic.ini.j2 @@ -35,7 +35,7 @@ script_location = migrations # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = {{ sbs_db_connection_migration }} +sqlalchemy.url = {{ sbs.db_connection_migration }} # Logging configuration [loggers] diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index 11f029af7..4c38c4100 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -3,8 +3,8 @@ database: uri: {{ sbs.db_connection_sbs }} redis: -{% if environment_name == 'tst2' %} - uri: "redis://{{redis_vhost}}/" +{% if sbs.environment_name == 'test2' %} + uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" {% else %} uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" {% endif %} @@ -54,7 +54,7 @@ eppn_scope: " {{ sbs.eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" -{% if environment_name == "prd" %} +{% if sbs.environment_name == "prd" %} environment_disclaimer: "" {% else %} environment_disclaimer: "{{ sbs.disclaimer_label }}" diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 index e89bbfce7..04c62aa86 100644 --- a/roles/sbs/templates/disclaimer.css.j2 +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -1,6 +1,6 @@ -{% if environment_name!="prd" -%} +{% if sbs.environment_name!="prd" -%} body::after { - background: {{ sbs_disclaimer_color }}; - content: "{{ sbs_disclaimer_label }}"; + background: {{ 
sbs.disclaimer_color }}; + content: "{{ sbs.disclaimer_label }}"; } {% endif %} diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 index 13752ee56..99fda3d13 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -1,7 +1,7 @@ -ServerName {{ hostnames.sbs }} +ServerName {{ sbs.base_domain }} #ErrorLog /proc/self/fd/2 #CustomLog /proc/self/fd/1 common -DocumentRoot /opt/sbs/client/build +DocumentRoot /opt/sbs/client/dist Header set Content-Security-Policy "default-src 'self'; base-uri 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; frame-src 'none'; form-action 'self' https://*.{{ base_domain }}; frame-ancestors 'none'; block-all-mixed-content;" Header set Permissions-Policy "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(self), gamepad=(), speaker-selection=()" @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html [L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ -ProxyPassReverse / http://{{ containers.sbs_server }}:{{sbs_backend_port}}/ -ProxyPass /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ -ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port}}/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) 
http://sbs-server:{{sbs.backend_port}}/ +ProxyPassReverse / http://sbs-server:{{sbs.backend_port}}/ +ProxyPass /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ +ProxyPassReverse /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" @@ -24,7 +24,7 @@ ProxyPassReverse /socket.io/ ws://{{ containers.sbs_server }}:{{sbs_backend_port Header set Cache-Control: "no-cache, private" - + Require all granted Options -Indexes From 49bb00a17f7c149e8b9e57f69da8089ea04dd849 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 26 Mar 2026 10:15:14 +0100 Subject: [PATCH 3/7] WIP --- roles/sbs/defaults/main.yml | 9 --------- roles/sbs/tasks/main.yml | 27 ++------------------------ roles/sbs/templates/config.yml.j2 | 6 +++--- roles/sbs/templates/sbs-apache.conf.j2 | 8 ++++---- 4 files changed, 9 insertions(+), 41 deletions(-) diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index 7a907df66..c42b80948 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -72,9 +72,6 @@ sbs_defaults: wiki_link: "https://www.example.org/wiki" - backend_port: 8080 - num_workers: 2 - cron_hour_of_day: 4 seed_allowed: True api_keys_enabled: True @@ -168,9 +165,3 @@ sbs_defaults: - 'sha256-WTC9gHKjIpzl5ub1eg/YrRy/k+jlzeyRojah9dxAApc=' # on /new-service-request engine_block_api_token: secret - - # wildcard_backend_cert: - # pub: | - # -----BEGIN CERTIFICATE----- - # 12345 - # -----END CERTIFICATE----- diff --git a/roles/sbs/tasks/main.yml b/roles/sbs/tasks/main.yml index ecc82bff7..560191f8a 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sbs/tasks/main.yml @@ -57,31 +57,8 @@ - "sbs.log" - "sbs.debug.log" -# - name: "Copy wildcard backend cert" -# copy: -# content: "{{wildcard_backend_cert.pub}}" -# dest: "{{sbs.cert_dir}}/backend.crt" -# owner: "root" -# group: "root" -# mode: "0644" -# notify: "Restart sbs containers" - -# - name: "Copy https cert" -# copy: -# content: 
"{{https_cert.cert}}" -# dest: "{{sbs.cert_dir}}/frontend.crt" -# owner: "root" -# group: "root" -# mode: "0644" -# notify: "Restart sbs containers" - -# - name: "Install database certificate" -# copy: -# dest: "{{sbs.db_cert_path}}" -# content: "{{ sbs.db_tls_cert }}" -# owner: "root" -# group: "root" -# mode: "0644" +# Create dummy file in certs dir to pacify container pre-init script +# https://github.com/SURFscz/SBS/pull/2312 - name: "Touch file in {{ sbs.cert_dir }}" ansible.builtin.file: path: "{{sbs.cert_dir}}/dummy" diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index 4c38c4100..eba293de9 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -30,8 +30,8 @@ api_users: {% endfor %} oidc: - client_id: "{{ sbs.client_id }}" - client_secret: "{{ sbs.client_secret }}" + client_id: "{{ sbs.oidc_client_id }}" + client_secret: "{{ sbs.oidc_client_secret }}" audience: "{{ sbs.oidc_jwt_audience }}" verify_peer: {{ sbs.oidc_verify_peer }} authorization_endpoint: "{{ sbs.oidc_authz_endpoint}}" @@ -45,7 +45,7 @@ oidc: second_factor_authentication_required: {{ sbs.second_factor_authentication_required }} totp_token_name: "{{ sbs.totp_token_name }}" # The service_id in the proxy_authz endpoint when logging into SBS. 
Most likely to equal the oidc.client_id - sram_service_entity_id: "{{ sbs.client_id }}" + sram_service_entity_id: "{{ sbs.oidc_client_id }}" scopes: {{ sbs.oidc_scopes }} base_scope: "{{ base_domain }}" diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sbs/templates/sbs-apache.conf.j2 index 99fda3d13..0743c2ddb 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sbs/templates/sbs-apache.conf.j2 @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html [L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:{{sbs.backend_port}}/ -ProxyPassReverse / http://sbs-server:{{sbs.backend_port}}/ -ProxyPass /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ -ProxyPassReverse /socket.io/ ws://sbs-server:{{sbs.backend_port}}/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:8080/ +ProxyPassReverse / http://sbs-server:8080/ +ProxyPass /socket.io/ ws://sbs-server:8080/socket.io/ +ProxyPassReverse /socket.io/ ws://sbs-server:8080/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" From aab37d9a22630c19bc75fce6d2cb70878a2f7ed8 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Thu, 26 Mar 2026 16:58:29 +0100 Subject: [PATCH 4/7] Add ldap role --- .../haproxy/templates/haproxy_backend.cfg.j2 | 16 + .../haproxy/templates/haproxy_frontend.cfg.j2 | 46 ++- roles/ldap/defaults/main.yml | 41 ++ roles/ldap/files/eduMember.ldif | 27 ++ roles/ldap/files/eduPerson.ldif | 83 ++++ roles/ldap/files/groupOfMembers.ldif | 19 + roles/ldap/files/ldap-add | 51 +++ roles/ldap/files/ldapPublicKey.ldif | 21 + roles/ldap/files/logrotate_slapd | 13 + roles/ldap/files/rsyslog_slapd.conf | 2 + roles/ldap/files/sczGroup.ldif | 23 ++ roles/ldap/files/sramPerson.ldif | 23 ++ roles/ldap/files/voPerson.ldif | 44 +++ roles/ldap/handlers/main.yml | 24 ++ roles/ldap/tasks/admins.yml | 82 ++++ 
roles/ldap/tasks/main.yml | 369 ++++++++++++++++++ roles/ldap/templates/ldap-backup.sh.j2 | 19 + roles/ldap/templates/ldap.conf.j2 | 16 + roles/ldap/templates/slapd.service.j2 | 20 + roles/ldap/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 1 - roles/sbs/templates/config.yml.j2 | 4 +- roles/sbs/templates/disclaimer.css.j2 | 2 +- 23 files changed, 928 insertions(+), 19 deletions(-) create mode 100644 roles/ldap/defaults/main.yml create mode 100644 roles/ldap/files/eduMember.ldif create mode 100644 roles/ldap/files/eduPerson.ldif create mode 100644 roles/ldap/files/groupOfMembers.ldif create mode 100644 roles/ldap/files/ldap-add create mode 100644 roles/ldap/files/ldapPublicKey.ldif create mode 100644 roles/ldap/files/logrotate_slapd create mode 100644 roles/ldap/files/rsyslog_slapd.conf create mode 100644 roles/ldap/files/sczGroup.ldif create mode 100644 roles/ldap/files/sramPerson.ldif create mode 100644 roles/ldap/files/voPerson.ldif create mode 100644 roles/ldap/handlers/main.yml create mode 100644 roles/ldap/tasks/admins.yml create mode 100644 roles/ldap/tasks/main.yml create mode 100644 roles/ldap/templates/ldap-backup.sh.j2 create mode 100644 roles/ldap/templates/ldap.conf.j2 create mode 100644 roles/ldap/templates/slapd.service.j2 create mode 100644 roles/ldap/vars/main.yml diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2 index d2387c033..ea52ac5b0 100644 --- a/roles/haproxy/templates/haproxy_backend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_backend.cfg.j2 @@ -67,3 +67,19 @@ {% endfor %} {% endif %} {% endfor %} + +{% if haproxy_ldap_servers is defined %} +#--------------------------------------------------------------------- +# ldap backend +#--------------------------------------------------------------------- +backend ldap_servers + mode tcp + option tcpka + + option ldap-check + + {% for server in haproxy_ldap_servers -%} + server {{server.label}} {{server.ip}}:{{server.port}} ssl verify 
none check weight 10 {% if loop.index==1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %} + {% endfor %} +{% endfor %} +{% endig %} diff --git a/roles/haproxy/templates/haproxy_frontend.cfg.j2 b/roles/haproxy/templates/haproxy_frontend.cfg.j2 index 6082e9c03..4909a0074 100644 --- a/roles/haproxy/templates/haproxy_frontend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_frontend.cfg.j2 @@ -12,8 +12,8 @@ frontend stats # ------------------------------------------------------------------- frontend internet_ip - bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent - bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent bind {{ haproxy_sni_ip.ipv4 }}:80 transparent bind {{ haproxy_sni_ip.ipv6 }}:80 transparent # Logging is done in the local_ip backend, otherwise all requests are logged twice @@ -30,7 +30,7 @@ frontend internet_ip http-request redirect scheme https code 301 if !{ ssl_fc } # Log the user agent in the httplogs capture request header User-agent len 256 - # Put the useragent header in a variable, shared between request and response. + # Put the useragent header in a variable, shared between request and response. 
http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -51,7 +51,7 @@ frontend internet_ip http-response replace-header Set-Cookie (?i)(^(?!.*samesite).*$) \1;\ SameSite=None if !no_same_site_uas # Remove an already present SameSite cookie attribute for unsupported browsers http-response replace-value Set-Cookie (^.*)(?i);\ *SameSite=(Lax|Strict|None)(.*$) \1\3 if no_same_site_uas - # Log whether the no_same_site_uas ACL has been hit + # Log whether the no_same_site_uas ACL has been hit http-request set-header samesitesupport samesite_notsupported if no_same_site_uas http-request set-header samesitesupport samesite_supported if !no_same_site_uas # We need a dummy backend in order to be able to rewrite the loadbalancer cookies @@ -66,7 +66,7 @@ frontend local_ip acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsunrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -82,7 +82,7 @@ frontend local_ip http-request capture sc_http_req_rate(0) len 4 # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. 
These are in a seperate mapfile @@ -96,7 +96,7 @@ frontend local_ip http-request deny if ! valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -111,8 +111,8 @@ frontend local_ip ## ------------------------------------------------------------------- frontend internet_restricted_ip - bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent - bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv4 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent + bind {{ haproxy_sni_ip_restricted.ipv6 }}:443 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 alpn h2,http/1.1 transparent bind {{ haproxy_sni_ip_restricted.ipv4 }}:80 transparent bind {{ haproxy_sni_ip_restricted.ipv6 }}:80 transparent # Logging is done in the local_ip_restriced backend, otherwise all requests are logged twice @@ -128,8 +128,8 @@ frontend internet_restricted_ip # We redirect all port 80 to port 443 http-request redirect scheme https code 301 if !{ ssl_fc } # Log the user agent in the httplogs - capture request header User-agent len 256 - 
# Put the useragent header in a variable, shared between request and response. + capture request header User-agent len 256 + # Put the useragent header in a variable, shared between request and response. http-request set-var(txn.useragent) req.fhdr(User-Agent) # The ACL below makes sure only supported http methods are allowed acl valid_method method {{ haproxy_supported_http_methods }} @@ -155,12 +155,12 @@ frontend internet_restricted_ip # frontend restricted ip addresses localhost # traffic coming back from the dummy backend ends up here # ------------------------------------------------------------------- -frontend localhost_restricted +frontend localhost_restricted bind 127.0.0.1:82 accept-proxy acl valid_vhost hdr(host) -f /etc/haproxy/acls/validvhostsrestricted.acl acl staging req.cook(staging) -m str true acl staging src -f /etc/haproxy/acls/stagingips.acl - acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map + acl stagingvhost hdr(host) -i -M -f /etc/haproxy/maps/backendsstaging.map use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backendsstaging.map)] if stagingvhost staging use_backend %[req.hdr(host),lower,map(/etc/haproxy/maps/backends.map)] option httplog @@ -177,7 +177,7 @@ frontend localhost_restricted # Create an ACL when the request rate exceeds {{ haproxy_max_request_rate }} per 10s acl exceeds_max_request_rate_per_ip sc_http_req_rate(0) gt {{ haproxy_max_request_rate }} http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Measure and log the request rate per path and ip + # Measure and log the request rate per path and ip http-request track-sc1 base32+src table st_httpreqs_per_ip_and_path http-request capture sc_http_req_rate(1) len 4 # Some paths allow for a higher ratelimit. These are in a seperate mapfile @@ -191,7 +191,7 @@ frontend localhost_restricted http-request deny if ! 
valid_vhost # Deny the request when the request rate exceeds {{ haproxy_max_request_rate }} per 10s http-request deny deny_status 429 if exceeds_max_request_rate_per_ip !allowlist - # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m + # Deny the request when the request rate per host header url path and src ip exceeds {{ haproxy_max_request_rate_ip_path }} per 1 m http-request deny deny_status 429 if exceeds_max_request_rate_per_ip_and_path !allowlist # Create some http redirects {% if haproxy_securitytxt_target_url is defined %} @@ -201,3 +201,19 @@ frontend localhost_restricted http-request redirect location %[base,map_reg(/etc/haproxy/maps/redirects.map)] if { base,map_reg(/etc/haproxy/maps/redirects.map) -m found } {% endif %} + +{% if haproxy_ldap_servers is defined %} +#-------------------------------------------------------------------- +# frontend public ips ldap +# ------------------------------------------------------------------- +listen ldap + mode tcp + no option dontlognull + option tcplog + option logasap + timeout client 900s + timeout server 901s + bind {{ haproxy_sni_ip.ipv4 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + bind {{ haproxy_sni_ip.ipv6 }}:636 ssl crt-list /etc/haproxy/maps/certlist.lst ssl crt /etc/haproxy/certs/ no-sslv3 no-tlsv10 no-tlsv11 transparent + use_backend ldap_servers +{% endif %} diff --git a/roles/ldap/defaults/main.yml b/roles/ldap/defaults/main.yml new file mode 100644 index 000000000..e60ab603c --- /dev/null +++ b/roles/ldap/defaults/main.yml @@ -0,0 +1,41 @@ +--- +ldap: "{{ ldap_defaults | combine(ldap_overrides) }}" + +ldap_defaults: + image: "ghcr.io/surfscz/sram-ldap:main" + conf_dir: "{{ current_release_appdir }}/ldap" + ldif_dir: "{{ ldap_defaults.conf_dir }}/schema" + certs_dir: "{{ ldap_defaults.conf_dir }}/certs" + backup_dir: "{{ ldap_defaults.conf_dir 
}}/ldap" + data_dir: "{{ ldap_defaults.conf_dir}}/data" + uri: "ldap://localhost/" + + user: "openldap" + group: "openldap" + + # admin_group: "ldap_admin" + admins: + - name: Admin + uid: admin + pw_hash: + sshkey: "" + + loglevel: "stats stats2 filter" + + services_password: secret + monitor_password: secret + ldap_monitor_password: secret + + uri: "ldap://localhost/" + rid_prefix: "ldap://" + + base_domain: "{{ base_domain }}" + base_dn: >- + {{ ((ldap_defaults.base_domain.split('.')|length)*['dc=']) | + zip(ldap_defaults.base_domain.split('.')) | list | map('join', '') | list | join(',') }} + services_dn: + basedn: "dc=services,{{ ldap_defaults.base_dn }}" + o: "Services" + binddn: "cn=admin,{{ ldap_defaults.base_dn }}" + + hosts: {} diff --git a/roles/ldap/files/eduMember.ldif b/roles/ldap/files/eduMember.ldif new file mode 100644 index 000000000..42894d596 --- /dev/null +++ b/roles/ldap/files/eduMember.ldif @@ -0,0 +1,27 @@ +dn: cn=eduMember,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduMember +# Internet X.500 Schema for Ldappc +# Includes the eduMember ObjectClass schema +# +# +# An auxiliary object class, "eduMember," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attributes specified as belonging to the +# object class are "isMemberOf" and "hasMember." 
+# +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.1 + NAME 'isMemberOf' + DESC 'identifiers for groups to which containing entity belongs' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.5.1.2 + NAME 'hasMember' + DESC 'identifiers for entities that are members of the group' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.5.2.1 + NAME 'eduMember' + AUXILIARY + MAY ( isMemberOf $ hasMember ) + ) diff --git a/roles/ldap/files/eduPerson.ldif b/roles/ldap/files/eduPerson.ldif new file mode 100644 index 000000000..e4f2c96a0 --- /dev/null +++ b/roles/ldap/files/eduPerson.ldif @@ -0,0 +1,83 @@ +dn: cn=eduperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: eduperson +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.1 + NAME 'eduPersonAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.7 + NAME 'eduPersonEntitlement' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.2 + NAME 'eduPersonNickName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.3 + NAME 'eduPersonOrgDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.4 + NAME 'eduPersonOrgUnitDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.5 + NAME 'eduPersonPrimaryAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 
1.3.6.1.4.1.5923.1.1.1.8 + NAME 'eduPersonPrimaryOrgUnitDN' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.6 + NAME 'eduPersonPrincipalName' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.12 + NAME 'eduPersonPrincipalNamePrior' + DESC 'eduPersonPrincipalNamePrior per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.9 + NAME 'eduPersonScopedAffiliation' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.10 + NAME 'eduPersonTargetedID' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.11 + NAME 'eduPersonAssurance' + DESC 'eduPerson per Internet2 and EDUCAUSE' + EQUALITY caseExactMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.13 + NAME 'eduPersonUniqueId' + DESC 'eduPersonUniqueId per Internet2' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 SINGLE-VALUE ) +olcAttributeTypes: ( 1.3.6.1.4.1.5923.1.1.1.16 + NAME 'eduPersonOrcid' + DESC 'ORCID researcher identifiers belonging to the principal' + EQUALITY caseIgnoreMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 ) +olcObjectClasses: ( 1.3.6.1.4.1.5923.1.1.2 + NAME 'eduPerson' + AUXILIARY + MAY ( + eduPersonAffiliation $ eduPersonNickname $ eduPersonOrgDN $ + eduPersonOrgUnitDN $ eduPersonPrimaryAffiliation $ + eduPersonPrincipalName $ eduPersonEntitlement $ eduPersonPrimaryOrgUnitDN $ + eduPersonScopedAffiliation $ eduPersonTargetedID $ eduPersonAssurance $ + eduPersonPrincipalNamePrior $ eduPersonUniqueId $ 
eduPersonOrcid ) + ) diff --git a/roles/ldap/files/groupOfMembers.ldif b/roles/ldap/files/groupOfMembers.ldif new file mode 100644 index 000000000..aa10094d3 --- /dev/null +++ b/roles/ldap/files/groupOfMembers.ldif @@ -0,0 +1,19 @@ +# Internet X.500 Schema for Ldappc +# Includes the groupOfMembers ObjectClass schema +# +# Taken from RFC2307bis draft 2 +# https://tools.ietf.org/html/draft-howard-rfc2307bis-02 +# +# An structural object class, "groupOfMembers" is a convenient container +# for an extensible set of attributes concerning group memberships. +# +dn: cn=groupOfMembers,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: groupOfMembers +olcObjectClasses: ( 1.3.6.1.1.1.2.18 SUP top STRUCTURAL + NAME 'groupOfMembers' + DESC 'A group with members (DNs)' + MUST cn + MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ + description $ member ) + ) diff --git a/roles/ldap/files/ldap-add b/roles/ldap/files/ldap-add new file mode 100644 index 000000000..3d0c5e487 --- /dev/null +++ b/roles/ldap/files/ldap-add @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# Copyright (C) 2015-2019 Maciej Delmanowski +# Copyright (C) 2015-2019 DebOps +# SPDX-License-Identifier: GPL-3.0-only + +# Check if specified LDAP schema file is loaded in the local slapd cn=config +# database. If not, try loading it in the server. + + +set -o nounset -o pipefail -o errexit + +schema_file="${1}" + +if [ -z "${schema_file}" ] ; then + printf "Error: You need to specify schema file to load\\n" && exit 1 +fi + +if [ ! -e "${schema_file}" ] ; then + printf "Error: %s does not exist\\n" "${schema_file}" && exit 1 +fi + +if [ ! 
-r "${schema_file}" ] ; then + printf "Error: %s is unreadable\\n" "${schema_file}" && exit 1 +fi + +# The schema file is already converted, we can deal with them directly +if [[ "${schema_file}" == *.ldif ]] ; then + + # Get the DN of the schema + schema_dn="$(grep -E '^^dn:\s' "${schema_file}")" + + # Get list of already installed schemas from local LDAP server + schema_list() { + ldapsearch -Y EXTERNAL -H ldapi:/// -LLLQ -b 'cn=schema,cn=config' dn \ + | sed -e '/^$/d' -e 's/{[0-9]\+}//' + } + + if schema_list | grep -q "${schema_dn}" ; then + + # Schema is already installed, do nothing + exit 80 + + else + + # Try installing the schema in the database + ldapadd -Y EXTERNAL -H ldapi:/// -f "${schema_file}" + + fi + +fi diff --git a/roles/ldap/files/ldapPublicKey.ldif b/roles/ldap/files/ldapPublicKey.ldif new file mode 100644 index 000000000..8968b6e96 --- /dev/null +++ b/roles/ldap/files/ldapPublicKey.ldif @@ -0,0 +1,21 @@ +dn: cn=openssh-lpk-openldap,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: openssh-lpk-openldap +# +# LDAP Public Key Patch schema for use with openssh-ldappubkey +# useful with PKA-LDAP also +# +# Author: Eric AUGE +# +# Based on the proposal of : Mark Ruijter +# +# octetString SYNTAX +olcAttributeTypes: ( 1.3.6.1.4.1.24552.500.1.1.1.13 NAME 'sshPublicKey' + DESC 'MANDATORY: OpenSSH Public key' + EQUALITY octetStringMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 ) +# printableString SYNTAX yes|no +olcObjectClasses: ( 1.3.6.1.4.1.24552.500.1.1.2.0 NAME 'ldapPublicKey' SUP top AUXILIARY + DESC 'MANDATORY: OpenSSH LPK olcObjectClasses:' + MUST ( sshPublicKey $ uid ) + ) diff --git a/roles/ldap/files/logrotate_slapd b/roles/ldap/files/logrotate_slapd new file mode 100644 index 000000000..f225a935f --- /dev/null +++ b/roles/ldap/files/logrotate_slapd @@ -0,0 +1,13 @@ +/var/log/slapd.log +{ + rotate 7 + daily + missingok + notifempty + delaycompress + compress + postrotate + invoke-rc.d rsyslog rotate > /dev/null + endscript +} + diff --git 
a/roles/ldap/files/rsyslog_slapd.conf b/roles/ldap/files/rsyslog_slapd.conf new file mode 100644 index 000000000..a3435617f --- /dev/null +++ b/roles/ldap/files/rsyslog_slapd.conf @@ -0,0 +1,2 @@ +if $programname == 'slapd' then /var/log/slapd.log +if $programname == 'slapd' then ~ diff --git a/roles/ldap/files/sczGroup.ldif b/roles/ldap/files/sczGroup.ldif new file mode 100644 index 000000000..d1b5cb332 --- /dev/null +++ b/roles/ldap/files/sczGroup.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sczGroup ObjectClass schema +# +# An auxiliary object class, "sczGroup," is a convenient container +# for an extensible set of attributes concerning group memberships. +# At this time, the only attribute specified as belonging to the +# object class is "sczMember." +# +# It is specifically configured to support the memberOf overlay. +# +dn: cn=sczGroup,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sczGroup +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.40.50.1.1 + NAME 'sczMember' + DESC 'DN identifiers for entities that are members of the group' + EQUALITY distinguishedNameMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.40.50.1 + NAME 'sczGroup' + AUXILIARY + MAY ( sczMember ) + ) diff --git a/roles/ldap/files/sramPerson.ldif b/roles/ldap/files/sramPerson.ldif new file mode 100644 index 000000000..e194381d1 --- /dev/null +++ b/roles/ldap/files/sramPerson.ldif @@ -0,0 +1,23 @@ +# Internet X.500 Schema for Ldappc +# Includes the sramPerson ObjectClass schema +# +# An auxiliary object class, "sramPerson," is a convenient container +# for an extensible set of attributes concerning sram persons. +# At this time, the only attribute specified as belonging to the +# object class is "sramInactiveDays". 
+# +dn: cn=sramPerson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: sramPerson +olcAttributeTypes: ( 1.3.6.1.4.1.1076.20.100.20.2.1 NAME 'sramInactiveDays' + DESC 'Number of days this entity was inactive' + EQUALITY IntegerMatch + ORDERING IntegerOrderingMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 + ) +olcObjectClasses: ( 1.3.6.1.4.1.1076.20.100.20.1.1 NAME 'sramPerson' + AUXILIARY + MAY ( + sramInactiveDays + ) + ) diff --git a/roles/ldap/files/voPerson.ldif b/roles/ldap/files/voPerson.ldif new file mode 100644 index 000000000..bdce11ed8 --- /dev/null +++ b/roles/ldap/files/voPerson.ldif @@ -0,0 +1,44 @@ +dn: cn=voperson,cn=schema,cn=config +objectClass: olcSchemaConfig +cn: voperson +olcAttributeTypes: {0}( 1.3.6.1.4.1.34998.3.3.1.1 NAME 'voPersonApplicationUID + ' DESC 'voPerson Application-Specific User Identifier' EQUALITY caseIgnoreMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {1}( 1.3.6.1.4.1.34998.3.3.1.2 NAME 'voPersonAuthorName' DE + SC 'voPerson Author Name' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.1 + 15.121.1.15' ) +olcAttributeTypes: {2}( 1.3.6.1.4.1.34998.3.3.1.3 NAME 'voPersonCertificateDN' + DESC 'voPerson Certificate Distinguished Name' EQUALITY distinguishedNameMat + ch SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {3}( 1.3.6.1.4.1.34998.3.3.1.4 NAME 'voPersonCertificateIss + uerDN' DESC 'voPerson Certificate Issuer DN' EQUALITY distinguishedNameMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.12' ) +olcAttributeTypes: {4}( 1.3.6.1.4.1.34998.3.3.1.5 NAME 'voPersonExternalID' DE + SC 'voPerson Scoped External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3 + .6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {5}( 1.3.6.1.4.1.34998.3.3.1.6 NAME 'voPersonID' DESC 'voPe + rson Unique Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115 + .121.1.15' ) +olcAttributeTypes: {6}( 1.3.6.1.4.1.34998.3.3.1.7 NAME 'voPersonPolicyAgreemen + t' DESC 'voPerson Policy Agreement Indicator' EQUALITY 
caseIgnoreMatch SYNTAX + '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {7}( 1.3.6.1.4.1.34998.3.3.1.8 NAME 'voPersonSoRID' DESC 'v + oPerson External Identifier' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.146 + 6.115.121.1.15' ) +olcAttributeTypes: {8}( 1.3.6.1.4.1.34998.3.3.1.9 NAME 'voPersonStatus' DESC ' + voPerson Status' EQUALITY caseIgnoreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1. + 15' ) +olcAttributeTypes: {9}( 1.3.6.1.4.1.34998.3.3.1.10 NAME 'voPersonAffiliation' + DESC 'voPerson Affiliation Within Local Scope' EQUALITY caseIgnoreMatch SYNTA + X '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {10}( 1.3.6.1.4.1.34998.3.3.1.11 NAME 'voPersonExternalAffi + liation' DESC 'voPerson Scoped External Affiliation' EQUALITY caseIgnoreMatch + SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcAttributeTypes: {11}( 1.3.6.1.4.1.34998.3.3.1.12 NAME 'voPersonScopedAffili + ation' DESC 'voPerson Affiliation With Explicit Local Scope' EQUALITY caseIgn + oreMatch SYNTAX '1.3.6.1.4.1.1466.115.121.1.15' ) +olcObjectClasses: {0}( 1.3.6.1.4.1.34998.3.3.1 NAME 'voPerson' AUXILIARY MAY ( + voPersonAffiliation $ voPersonApplicationUID $ voPersonAuthorName $ voPerson + CertificateDN $ voPersonCertificateIssuerDN $ voPersonExternalAffiliation $ v + oPersonExternalID $ voPersonID $ voPersonPolicyAgreement $ voPersonScopedAffi + liation $ voPersonSoRID $ voPersonStatus ) ) diff --git a/roles/ldap/handlers/main.yml b/roles/ldap/handlers/main.yml new file mode 100644 index 000000000..0510176a6 --- /dev/null +++ b/roles/ldap/handlers/main.yml @@ -0,0 +1,24 @@ +--- +- name: restart rsyslog + service: + name: rsyslog + state: restarted + listen: "restart rsyslog" + +- name: systemd daemon-reload + systemd: + name: slapd + daemon_reload: yes + +- name: restart LDAP + systemd: + name: slapd + state: restarted + enabled: true + daemon-reload: true + +- name: Restart the ldap container + community.docker.docker_container: + name: "{{ containers.ldap }}" + restart: true + state: 
started diff --git a/roles/ldap/tasks/admins.yml b/roles/ldap/tasks/admins.yml new file mode 100644 index 000000000..e00115c04 --- /dev/null +++ b/roles/ldap/tasks/admins.yml @@ -0,0 +1,82 @@ +--- +# - name: Initialize DIT admin +# community.general.ldap_entry: +# dn: "{{ services_ldap.binddn }}" +# objectClass: organizationalRole +# attributes: +# cn: "{{ services_ldap.binddn | regex_replace('^cn=([^,]+).*', '\\1') }}" + +# determine which users need to be admin +# check for each role of each user if it leads to membership of group {{ldap_admin_group}} +# - name: determine ldap admins +# set_fact: +# ldap_admins: "{{ ldap_admins | default([]) + [item.0] }}" +# when: ldap_admin_group in role_to_groups[item.1] or ldap_admin_group in item.0.groups +# loop: "{{ users | subelements('roles') }}" + +- name: determine ldap admins + set_fact: + ldap_admins: "{{ ldap.admins }}" + +# Find existing ldap admins +- name: Initialize admins (I) + community.general.ldap_search: + dn: "{{ ldap.services_dn.basedn }}" + scope: "onelevel" + filter: "(objectClass=organizationalRole)" + attrs: + - "cn" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + register: "existing_ldap_admins_result" + +# ansible sucks like this: we need to extract the results from the result +- name: Initialize admins (Ia) + set_fact: + existing_ldap_admins: "{{ existing_ldap_admins_result.results }}" + +- debug: + var: "existing_ldap_admins" +- debug: + var: "ldap_admins" + +# Remove LDAP non-admins +- name: Initialize admins (II) + community.general.ldap_entry: + dn: "cn={{ item.cn }},{{ services_ldap.basedn }}" + state: absent + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + when: > + item.cn not in ldap_admins | map(attribute='uid') + and item.cn != 'admin' + loop: "{{existing_ldap_admins}}" + +# Insert LDAP admins +- name: Initialize admins (III) + 
community.general.ldap_entry: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + objectClass: + - simpleSecurityObject + - organizationalRole + attributes: + description: An LDAP administrator + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + +# Make sure passwords are updated for existing admins +- name: Initialize admins (IV) + community.general.ldap_attrs: + dn: "cn={{ item.uid }},{{ ldap.services_dn.basedn }}" + attributes: + userPassword: "{{ item.pw_hash }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ldap.uri }}" + loop: "{{ ldap_admins }}" + diff --git a/roles/ldap/tasks/main.yml b/roles/ldap/tasks/main.yml new file mode 100644 index 000000000..f58c21ee7 --- /dev/null +++ b/roles/ldap/tasks/main.yml @@ -0,0 +1,369 @@ +--- +# playbook to install and configure all components of the LDAP +- name: Install LDAP utils + apt: + state: "present" + name: + - "python3-ldap" # for ansible ldap modules + install_recommends: false + +- name: Ensure that a number of directories exist + file: + path: "{{ item.path }}" + state: "directory" + # owner: "{{ ldap.user }}" + # group: "{{ ldap.group }}" + mode: "{{ item.mode }}" + with_items: + - { path: "{{ldap.ldif_dir}}", mode: "0755" } + - { path: "{{ldap.certs_dir}}", mode: "0755" } + - { path: "{{ldap.data_dir}}", mode: "0777" } + notify: Restart the ldap container + +- name: Copy schemas + copy: + src: "{{ item }}" + dest: "{{ ldap.ldif_dir }}/{{ item }}" + mode: "0644" + with_items: + - sczGroup.ldif + - groupOfMembers.ldif + - eduPerson.ldif + - ldapPublicKey.ldif + - eduMember.ldif + - voPerson.ldif + - sramPerson.ldif + notify: Restart the ldap container + +- name: Copying ldap-add script + copy: + src: "{{ item }}" + dest: "{{ ldap.conf_dir }}/{{ item }}" + mode: "0755" + with_items: + - ldap-add + +# # cert is used for 
communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Copy wildcard frontend cert +# copy: +# src: "/etc/ssl/certs/sram-https.pem" # was installed here by update-ca-certificates +# remote_src: true +# dest: "{{ldap.certs_dir}}/frontend.crt" +# mode: "0644" +# when: "is_dev" +# notify: Restart the ldap container + +- name: Setup ldap hosts + vars: + host: + key: "%s.{{ ldap.base_domain }}" + value: "%s" + etc_hosts: {} + set_fact: + etc_hosts: >- + {{ etc_hosts | + combine({ host.key | format(item.key): host.value | format(item.value) }) }} + with_dict: "{{ ldap.hosts }}" + +- name: Create the ldap container + community.docker.docker_container: + name: "ldap" + image: "{{ ldap.image }}" + restart_policy: "always" + state: started + pull: true + ports: + - 0.0.0.0:389:389 + env: + LDAP_ORGANISATION: "{{ env }}" + LDAP_DOMAIN: "{{ ldap.base_domain }}" + LDAP_ROOTPASS: "{{ ldap.services_password }}" + etc_hosts: "{{ etc_hosts }}" + volumes: + # For now the target side /opt/ldap is hard-coded + - "{{ ldap.conf_dir }}:/opt/ldap" + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.tcp.routers.ldap.entrypoints: "ldaps" + traefik.tcp.routers.ldap.rule: "HostSNI(`*`)" + traefik.tcp.routers.ldap.tls: "true" + traefik.tcp.services.ldap.loadbalancer.server.port: "389" + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ -S /var/run/slapd/ldapi ]]" + register: "ldap_container" + +- name: Wait for LDAP initialization + ansible.builtin.wait_for: + port: 389 + delay: 5 + +- name: Wait for 5 seconds + ansible.builtin.wait_for: + timeout: 5 + when: "ldap_container is changed" + +- name: Ensure the schemas are added to LDAP + ansible.builtin.shell: + # For now the target side /opt/ldap is hard-coded + cmd: "docker exec ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" + register: "result" + failed_when: "result.rc not in [0,80]" + changed_when: "result.rc != 80" + become: true + loop: + - 
"sczGroup.ldif" + - "groupOfMembers.ldif" + - "eduPerson.ldif" + - "ldapPublicKey.ldif" + - "eduMember.ldif" + - "voPerson.ldif" + - "sramPerson.ldif" + +- name: Set indices + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcDbIndex: "{{item}}" + state: "present" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + with_items: + - "entryUUID eq" + - "o eq" + - "dc eq" + - "entryCSN eq" + +- name: Set olcDatabase={-1}frontend olcSizeLimit + community.general.ldap_attrs: + dn: "olcDatabase={-1}frontend,cn=config" + state: "exact" + attributes: + olcSizeLimit: "unlimited" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Set config + community.general.ldap_attrs: + dn: "cn=config" + state: "present" + attributes: + olcServerID: "{{ ldap.server_id }}" + olcSizeLimit: "unlimited" + olcLogLevel: "{{ ldap.loglevel }}" + olcAttributeOptions: "time-" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# # cert is used for communication between ldap for sync +# # is generated in roles/certificates/tasks/main.yml +# - name: Set TLS config +# community.general.ldap_attrs: +# dn: "cn=config" +# state: "exact" +# attributes: +# olcTLSCACertificateFile: "/opt/ldap/certs/frontend.crt" +# bind_dn: "cn=admin,cn=config" +# bind_pw: "{{ ldap.services_password }}" +# server_uri: "{{ ldap.uri }}" + +- name: Setup Modules + community.general.ldap_attrs: + dn: cn=module{0},cn=config + attributes: + olcModuleLoad: + - syncprov + - dynlist.so + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Dynlist + community.general.ldap_entry: + dn: olcOverlay=dynlist,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcDynamicList + attributes: + olcDlAttrSet: "voPerson labeledURI 
member+memberOf@groupOfMembers" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +- name: Setup Syncprov + community.general.ldap_entry: + dn: olcOverlay=syncprov,olcDatabase={1}mdb,cn=config + objectClass: + - olcOverlayConfig + - olcSyncProvConfig + attributes: + olcSpCheckpoint: 100 10 + olcSpSessionLog: 100 + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# Leave this here in case we do want to build our own +# root database from scratch instead of relying on the +# domain based Debian slapd package setup +# +# - name: Setup main database +# community.general.ldap_attrs: +# dn: olcDatabase={1}mdb,cn=config +# attributes: +# olcSuffix: "{{ services_ldap.basedn }}" +# olcRootDN: "{{ services_ldap.binddn }}" +# olcRootPW: "{{ '%s' | format(services_ldap_password) | slapd_hash }}" +# state: exact +# +# - name: Set root credentials +# community.general.ldap_attrs: +# dn: olcDatabase={0}config,cn=config +# attributes: +# olcAccess: >- +# {0}to * +# by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage +# {% if environment_name=="vm" %} +# by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage +# {% endif %} +# by dn.exact="{{ services_ldap.binddn }}" manage +# by * break +# state: exact +# bind_dn: "cn=admin,cn=config" +# bind_pw: "{{ services_ldap_password }}" +# server_uri: "{{ ldap_uri }}" +# +# +# - name: Get uid of openldap user +# ansible.builtin.getent: +# database: "passwd" +# key: "openldap" +# +# # store it in a nice var (so line below doesn't get too long) +# - name: Save gid of openldap user +# ansible.builtin.set_fact: +# openldap_auth: "gidNumber={{ansible_facts.getent_passwd['openldap'][2]}}+\ +# uidNumber={{ansible_facts.getent_passwd['openldap'][1]}}" +# + +- name: Set ACLs + community.general.ldap_attrs: + dn: "olcDatabase={1}mdb,cn=config" + attributes: + olcAccess: + - >- + to 
dn.regex="(([^,]+),{{ ldap.services_dn.basedn }})$" + by dn.exact="{{ ldap.services_dn.binddn }}" write + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth write + by dn.exact,expand="cn=admin,$1" read + by * break + - >- + to * + by dn.exact=gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth manage + by dn.regex="cn=[^,]+,{{ ldap.services_dn.basedn }}" read + {% if env=="vm" %} + by dn.exact=gidNumber=1000+uidNumber=1000,cn=peercred,cn=external,cn=auth manage + {% endif %} + by * break + - >- + to attrs=userPassword + by self write + by anonymous auth + by * break + state: exact + ordered: true + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# ldap_rids: +# 101: ldaps://ldap1.scz-vm.net/ +# 102: ldaps://ldap2.scz-vm.net/ + +- name: Set rids + vars: + start: 101 + rid: + key: "%d" + value: "{{ ldap.rid_prefix }}%s.{{ ldap.base_domain }}/" + ldap_rids: {} + set_fact: + ldap_rids: >- + {{ ldap_rids | combine({ rid.key | format(start|int): + rid.value | format(item.key) }) }} + start: "{{ start|int + 1 }}" + with_dict: "{{ ldap.hosts | dict2items | sort(attribute='key') }}" + +# Voor toekomstige Claude gebruikers: onderstaande construct levert aan het eind +# een string representatie van de dict op, die niet meer gebruikt kan worden +# in de hieropvolgende 'Setup rids' task... +# - name: Set rids +# set_fact: +# ldap_rids: >- +# {%- set result = {} %} +# {%- for host in (ldap_hosts | dict2items | sort(attribute='key')) %} +# {%- set _ = result.update({(101 + loop.index0)|string: \ +# ldap_rid_prefix ~ host.key ~ '.' 
~ base_domain ~ '/'}) %} +# {%- endfor %} +# {{ result }} + +- name: Setup rids + vars: + rid: >- + rid={} + provider="{}" + searchbase="{{ ldap.services_dn.basedn }}" + type=refreshAndPersist + bindmethod=simple + binddn="{{ ldap.services_dn.binddn }}" + credentials={{ ldap.services_password }} + retry="30 +" + timeout=30 + network-timeout=5 + rids: [] + set_fact: + rids: "{{ rids + [ rid.format(item.key, item.value) ] }}" + with_dict: "{{ dict(ldap_rids) }}" + +- name: Setup Syncrepl + community.general.ldap_attrs: + dn: olcDatabase={1}mdb,cn=config + attributes: + olcSyncrepl: "{{ rids }}" + olcMultiProvider: "TRUE" + bind_dn: "cn=admin,cn=config" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + +# We now have Syncrepl in place, so only write to primary +- name: Initialize DIT + community.general.ldap_entry: + dn: "{{ ldap.services_dn.basedn }}" + state: "present" + objectClass: + - "top" + - "dcObject" + - "organization" + attributes: + dc: "{{ ldap.services_dn.basedn | regex_replace('^dc=([^,]+).*', '\\1') }}" + o: "{{ ldap.services_dn.o }}" + bind_dn: "{{ ldap.services_dn.binddn }}" + bind_pw: "{{ ldap.services_password }}" + server_uri: "{{ ldap.uri }}" + when: > + inventory_hostname in groups['ldap_primary'] + +# We now have Syncrepl in place, so only write to primary +- name: Add ldap admins + include_tasks: "admins.yml" + when: > + inventory_hostname in groups['ldap_primary'] diff --git a/roles/ldap/templates/ldap-backup.sh.j2 b/roles/ldap/templates/ldap-backup.sh.j2 new file mode 100644 index 000000000..2c6aa9201 --- /dev/null +++ b/roles/ldap/templates/ldap-backup.sh.j2 @@ -0,0 +1,19 @@ +#!/bin/bash +# vim:ft=sh +set -e + +if [ $UID -ne 0 ] +then + echo "Sorry, this script must run as root" + exit 1 +fi + +BACKUP_DIR="{{ldap_backup_dir}}" +BACKUP_FILE="$BACKUP_DIR/ldap_$(/bin/date +%Y-%m-%d_%H:%M)" + +mkdir -p -m 0755 "$BACKUP_DIR" + +/usr/sbin/slapcat -o ldif-wrap=no -n0 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db0.ldif.bz2" 
+/usr/sbin/slapcat -o ldif-wrap=no -n1 | /bin/bzip2 -c6 > "${BACKUP_FILE}.db1.ldif.bz2" + +exit 0 diff --git a/roles/ldap/templates/ldap.conf.j2 b/roles/ldap/templates/ldap.conf.j2 new file mode 100644 index 000000000..d7fa7c227 --- /dev/null +++ b/roles/ldap/templates/ldap.conf.j2 @@ -0,0 +1,16 @@ +# +# LDAP Defaults +# + +# See ldap.conf(5) for details +# This file should be world readable but not world writable. + +#BASE dc=example,dc=com +#URI ldap://ldap.example.com ldap://ldap-master.example.com:666 + +#SIZELIMIT 12 +#TIMELIMIT 15 +#DEREF never + +# TLS certificates (needed for GnuTLS) +TLS_CACERT {{ ssl_certs_dir }}/{{ internal_base_domain }}.crt diff --git a/roles/ldap/templates/slapd.service.j2 b/roles/ldap/templates/slapd.service.j2 new file mode 100644 index 000000000..7e0f79397 --- /dev/null +++ b/roles/ldap/templates/slapd.service.j2 @@ -0,0 +1,20 @@ +[Unit] +Description = LDAP server + +[Service] +Type = forking +User = root +SupplementaryGroups = ssl-cert +ExecStartPre=-/bin/mkdir -p /var/run/slapd +ExecStartPre=-/bin/chown openldap. 
/var/run/slapd +ExecStart = /usr/sbin/slapd -F /etc/ldap/slapd.d -u openldap -g openldap -h 'ldapi:/// ldap://localhost/ ldaps://{{inventory_hostname}}/' +Restart = always +RestartSec = 30 +PIDFile = /run/slapd/slapd.pid +# defaults are 1024:524288 which is too small for slapd +# see https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=378261 and +# https://www.openldap.org/lists/openldap-software/200802/msg00186.html +LimitNOFILE=4096:524288 + +[Install] +WantedBy = multi-user.target diff --git a/roles/ldap/vars/main.yml b/roles/ldap/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/ldap/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index c42b80948..9bcd62c87 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -3,7 +3,6 @@ sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" sbs_defaults: base_domain: "test2.sram.surf.nl" - environment_name: test ansible_nolog: true base_url: "https://{{ sbs_defaults.base_domain }}" server_image: "ghcr.io/surfscz/sram-sbs-server:main" diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index eba293de9..d052565c2 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -3,7 +3,7 @@ database: uri: {{ sbs.db_connection_sbs }} redis: -{% if sbs.environment_name == 'test2' %} +{% if env == 'test2' %} uri: "redis://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{sbs.redis_host}}/" {% else %} uri: "redis{% if sbs.redis_ssl %}s{% endif %}://{{ sbs.redis_user }}:{{ sbs.redis_password }}@{{ sbs.redis_host }}:{{ sbs.redis_port }}/" @@ -54,7 +54,7 @@ eppn_scope: " {{ sbs.eppn_scope }}" scim_schema_sram: "urn:mace:surf.nl:sram:scim:extension" collaboration_creation_allowed_entitlement: "urn:mace:surf.nl:sram:allow-create-co" -{% if sbs.environment_name == "prd" %} +{% if env == "prd" %} environment_disclaimer: "" {% else %} 
environment_disclaimer: "{{ sbs.disclaimer_label }}" diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sbs/templates/disclaimer.css.j2 index 04c62aa86..7922f5e5b 100644 --- a/roles/sbs/templates/disclaimer.css.j2 +++ b/roles/sbs/templates/disclaimer.css.j2 @@ -1,4 +1,4 @@ -{% if sbs.environment_name!="prd" -%} +{% if env!="prd" -%} body::after { background: {{ sbs.disclaimer_color }}; content: "{{ sbs.disclaimer_label }}"; From e999fad981a7d6cfd621f8a6faca7d27c7da1b3c Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 14:55:12 +0100 Subject: [PATCH 5/7] Add plsc and mailpit roles --- roles/ldap/defaults/main.yml | 2 +- roles/mailpit/defaults/main.yml | 8 +++++++ roles/mailpit/tasks/main.yml | 37 +++++++++++++++++++++++++++++++ roles/plsc/defaults/main.yml | 15 +++++++++++++ roles/plsc/handlers/main.yml | 18 +++++++++++++++ roles/plsc/tasks/main.yml | 28 +++++++++++++++++++++++ roles/plsc/templates/plsc.yml.j2 | 25 +++++++++++++++++++++ roles/plsc/vars/main.yml | 1 + roles/sbs/defaults/main.yml | 4 +++- roles/sbs/templates/config.yml.j2 | 2 +- 10 files changed, 137 insertions(+), 3 deletions(-) create mode 100644 roles/mailpit/defaults/main.yml create mode 100644 roles/mailpit/tasks/main.yml create mode 100644 roles/plsc/defaults/main.yml create mode 100644 roles/plsc/handlers/main.yml create mode 100644 roles/plsc/tasks/main.yml create mode 100644 roles/plsc/templates/plsc.yml.j2 create mode 100644 roles/plsc/vars/main.yml diff --git a/roles/ldap/defaults/main.yml b/roles/ldap/defaults/main.yml index e60ab603c..7d02ebe4a 100644 --- a/roles/ldap/defaults/main.yml +++ b/roles/ldap/defaults/main.yml @@ -1,5 +1,5 @@ --- -ldap: "{{ ldap_defaults | combine(ldap_overrides) }}" +ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" ldap_defaults: image: "ghcr.io/surfscz/sram-ldap:main" diff --git a/roles/mailpit/defaults/main.yml b/roles/mailpit/defaults/main.yml new file mode 100644 index 000000000..7647de9dc --- /dev/null +++ 
b/roles/mailpit/defaults/main.yml @@ -0,0 +1,8 @@ +--- +mailpit: "{{ mailpit_defaults | combine(mailpit_overrides, recursive=true) }}" + +mailpit_defaults: + image: "axllent/mailpit" + hostname: "mailpit.{{ base_domain }}" + user: "mailpit" + group: "mailpit" diff --git a/roles/mailpit/tasks/main.yml b/roles/mailpit/tasks/main.yml new file mode 100644 index 000000000..7fb32d8a7 --- /dev/null +++ b/roles/mailpit/tasks/main.yml @@ -0,0 +1,37 @@ +--- +- name: "Create mailpit group" + group: + name: "{{ mailpit.group }}" + state: "present" + register: "result" + +- name: "Create mailpit user" + user: + name: "{{ mailpit.user }}" + group: "{{ mailpit.group }}" + comment: "User to run Mailpit service" + shell: "/bin/false" + password: "!" + create_home: false + state: "present" + register: "result" + +- name: "Save mailpit user uid" + set_fact: + mailpit_user_uid: "{{ result.uid }}" + +- name: "Create mailpit container" + docker_container: + name: "mailpit" + image: "{{ mailpit.image }}" + restart_policy: "always" + state: "started" + user: "{{ mailpit_user_uid }}" + ports: + networks: + - name: "loadbalancer" + labels: + traefik.enable: "true" + traefik.http.routers.mailpit.rule: "Host(`{{ mailpit.hostname }}`)" + traefik.http.routers.mailpit.tls: "true" + traefik.http.services.mailpit.loadbalancer.server.port: 8025 diff --git a/roles/plsc/defaults/main.yml b/roles/plsc/defaults/main.yml new file mode 100644 index 000000000..2a3711b0c --- /dev/null +++ b/roles/plsc/defaults/main.yml @@ -0,0 +1,15 @@ +--- +plsc: "{{ plsc_defaults | combine(plsc_overrides, recursive=true) }}" + +plsc_defaults: + image: "ghcr.io/surfscz/sram-plsc:main" + conf_dir: "{{current_release_appdir}}/sram/plsc" + ansible_nolog: false + ldap_uri: "ldap://ldap:389/" + ldap_basedn: "dc=services,dc=vnet" + ldap_binddn: "cn=admin,dc=vnet" + ldap_password: "secret" + sbs_host: "http://sbs-server:8080" + sbs_user: "sysread" + sbs_password: "secret" + retry: 3 diff --git a/roles/plsc/handlers/main.yml 
b/roles/plsc/handlers/main.yml new file mode 100644 index 000000000..9ce03e899 --- /dev/null +++ b/roles/plsc/handlers/main.yml @@ -0,0 +1,18 @@ +--- +# - name: enable plsc job +# systemd: +# name: "plsc.timer" +# enabled: true +# state: "restarted" +# daemon_reload: true + +# - name: "restart zabbix-agent" +# systemd: +# name: "zabbix-agent2.service" +# state: "restarted" + +- name: Restart the plsc container + community.docker.docker_container: + name: "plsc" + restart: true + state: started diff --git a/roles/plsc/tasks/main.yml b/roles/plsc/tasks/main.yml new file mode 100644 index 000000000..14e7b40b7 --- /dev/null +++ b/roles/plsc/tasks/main.yml @@ -0,0 +1,28 @@ +--- +- name: Make sure clients sync directory exists + file: + path: "{{ plsc.conf_dir }}" + state: directory + mode: "0755" + +- name: "Create plsc.yml source if it doesn't exist" + template: + src: "plsc.yml.j2" + dest: "{{ plsc.conf_dir }}/plsc.yml" + mode: "0640" + no_log: "{{plsc.ansible_nolog}}" + notify: "Restart the plsc container" + +- name: Create the plsc container + community.docker.docker_container: + name: "plsc" + image: "{{ plsc.image }}" + restart_policy: "always" + state: started + pull: true + mounts: + - type: bind + source: "{{ plsc.conf_dir }}/plsc.yml" + target: "/opt/plsc/plsc.yml" + networks: + - name: "loadbalancer" diff --git a/roles/plsc/templates/plsc.yml.j2 b/roles/plsc/templates/plsc.yml.j2 new file mode 100644 index 000000000..a42c00807 --- /dev/null +++ b/roles/plsc/templates/plsc.yml.j2 @@ -0,0 +1,25 @@ +--- +ldap: + src: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 + dst: + uri: "{{ plsc.ldap_uri }}" + basedn: "{{ plsc.ldap_basedn }}" + binddn: "{{ plsc.ldap_binddn }}" + passwd: "{{ plsc.ldap_password }}" + sizelimit: 500 +sbs: + src: + host: "{{ plsc.sbs_host }}" + user: "{{ plsc.sbs_user }}" + passwd: "{{ plsc.sbs_password }}" + verify_ssl: {{ false if 
env=='vm' else true }} + timeout: 60 + retry: {{ plsc.retry }} +pwd: "{CRYPT}!" +uid: 1000 +gid: 1000 diff --git a/roles/plsc/vars/main.yml b/roles/plsc/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/plsc/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext diff --git a/roles/sbs/defaults/main.yml b/roles/sbs/defaults/main.yml index 9bcd62c87..aedcee959 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sbs/defaults/main.yml @@ -1,5 +1,5 @@ --- -sbs: "{{ sbs_defaults | combine(sbs_overrides) }}" +sbs: "{{ sbs_defaults | combine(sbs_overrides, recursive=true) }}" sbs_defaults: base_domain: "test2.sram.surf.nl" @@ -69,6 +69,8 @@ sbs_defaults: ticket_email: "sram-support@surf.nl" eduteams_email: "eduteams@localhost" + suppress_mails: False + wiki_link: "https://www.example.org/wiki" cron_hour_of_day: 4 diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sbs/templates/config.yml.j2 index d052565c2..7d4c92bf4 100644 --- a/roles/sbs/templates/config.yml.j2 +++ b/roles/sbs/templates/config.yml.j2 @@ -71,7 +71,7 @@ mail: port: {{ sbs.mail_port }} sender_name: {{ sbs.mail_sender_name }} sender_email: {{ sbs.mail_sender_email }} - suppress_sending_mails: False + suppress_sending_mails: {{ sbs.suppress_mails }} info_email: {{ sbs.support_email }} beheer_email: {{ sbs.admin_email }} ticket_email: {{ sbs.ticket_email }} From 94b1b38bba34243463b734490276b011a9a9dd69 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 16:32:42 +0100 Subject: [PATCH 6/7] Add sram-metadata and rename apps+roles --- roles/{ldap => sram-ldap}/defaults/main.yml | 2 +- .../{ldap => sram-ldap}/files/eduMember.ldif | 0 .../{ldap => sram-ldap}/files/eduPerson.ldif | 0 .../files/groupOfMembers.ldif | 0 roles/{ldap => sram-ldap}/files/ldap-add | 0 .../files/ldapPublicKey.ldif | 0 .../{ldap => sram-ldap}/files/logrotate_slapd | 0 .../files/rsyslog_slapd.conf | 0 roles/{ldap => sram-ldap}/files/sczGroup.ldif | 0 .../{ldap => 
sram-ldap}/files/sramPerson.ldif | 0 roles/{ldap => sram-ldap}/files/voPerson.ldif | 0 roles/{ldap => sram-ldap}/handlers/main.yml | 0 roles/{ldap => sram-ldap}/tasks/admins.yml | 0 roles/{ldap => sram-ldap}/tasks/main.yml | 4 +- .../templates/ldap-backup.sh.j2 | 0 .../templates/ldap.conf.j2 | 0 .../templates/slapd.service.j2 | 0 roles/{ldap => sram-ldap}/vars/main.yml | 0 roles/sram-metadata/defaults/main.yml | 81 +++++++++++++ roles/sram-metadata/files/01_idps.fd | 23 ++++ roles/sram-metadata/files/02_backend.fd | 14 +++ roles/sram-metadata/files/03_frontend.fd | 14 +++ roles/sram-metadata/files/surf.png | Bin 0 -> 16016 bytes roles/sram-metadata/files/surf.svg | 24 ++++ roles/sram-metadata/files/surf_bimi.svg | 15 +++ roles/sram-metadata/files/surfconext.crt | 3 + roles/sram-metadata/files/transform.xslt | 47 ++++++++ .../sram-metadata/files/transform_proxy.xslt | 50 +++++++++ roles/sram-metadata/handlers/main.yml | 19 ++++ roles/sram-metadata/tasks/http.yml | 48 ++++++++ roles/sram-metadata/tasks/main.yml | 49 ++++++++ roles/sram-metadata/tasks/pyff.yml | 106 ++++++++++++++++++ roles/sram-metadata/templates/index.html.j2 | 11 ++ .../templates/pyff-metadata.service.j2 | 12 ++ .../templates/pyff-metadata.timer.j2 | 8 ++ roles/sram-metadata/templates/vhosts.conf.j2 | 15 +++ roles/{plsc => sram-metadata}/vars/main.yml | 0 roles/sram-midproxy/defaults/main.yml | 8 ++ .../files/internal_attributes.yaml | 22 ++++ .../files/plugins/attribute-maps/basic.py | 51 +++++++++ .../plugins/backends/openid_backend.yaml | 14 +++ .../files/plugins/backends/saml2_backend.yaml | 1 + .../plugins/frontends/ping_frontend.yaml | 3 + .../plugins/frontends/saml2_frontend.yaml | 63 +++++++++++ .../microservices/generate_attributes.yaml | 8 ++ .../microservices/regex_attributes.yaml | 10 ++ roles/sram-midproxy/files/proxy_conf.yaml | 74 ++++++++++++ roles/sram-midproxy/tasks/main.yml | 59 ++++++++++ roles/{plsc => sram-plsc}/defaults/main.yml | 0 roles/{plsc => sram-plsc}/handlers/main.yml 
| 2 +- roles/{plsc => sram-plsc}/tasks/main.yml | 2 +- .../{plsc => sram-plsc}/templates/plsc.yml.j2 | 0 roles/{redis => sram-plsc}/vars/main.yml | 0 roles/{redis => sram-redis}/defaults/main.yml | 4 +- roles/{redis => sram-redis}/handlers/main.yml | 2 +- roles/{redis => sram-redis}/tasks/main.yml | 2 +- .../templates/redis.conf.j2 | 0 roles/{sbs => sram-redis}/vars/main.yml | 0 roles/{sbs => sram-sbs}/defaults/main.yml | 2 +- roles/{sbs => sram-sbs}/files/yarn.gpg | 0 roles/{sbs => sram-sbs}/handlers/main.yml | 4 +- roles/{sbs => sram-sbs}/tasks/main.yml | 14 +-- .../templates/alembic.ini.j2 | 0 .../{sbs => sram-sbs}/templates/config.yml.j2 | 0 .../templates/disclaimer.css.j2 | 0 .../templates/saml_advanced_settings.json.j2 | 0 .../templates/saml_settings.json.j2 | 0 .../templates/sbs-apache.conf.j2 | 8 +- .../templates/sbs.service.j2 | 0 roles/sram-sbs/vars/main.yml | 1 + 70 files changed, 876 insertions(+), 23 deletions(-) rename roles/{ldap => sram-ldap}/defaults/main.yml (95%) rename roles/{ldap => sram-ldap}/files/eduMember.ldif (100%) rename roles/{ldap => sram-ldap}/files/eduPerson.ldif (100%) rename roles/{ldap => sram-ldap}/files/groupOfMembers.ldif (100%) rename roles/{ldap => sram-ldap}/files/ldap-add (100%) rename roles/{ldap => sram-ldap}/files/ldapPublicKey.ldif (100%) rename roles/{ldap => sram-ldap}/files/logrotate_slapd (100%) rename roles/{ldap => sram-ldap}/files/rsyslog_slapd.conf (100%) rename roles/{ldap => sram-ldap}/files/sczGroup.ldif (100%) rename roles/{ldap => sram-ldap}/files/sramPerson.ldif (100%) rename roles/{ldap => sram-ldap}/files/voPerson.ldif (100%) rename roles/{ldap => sram-ldap}/handlers/main.yml (100%) rename roles/{ldap => sram-ldap}/tasks/admins.yml (100%) rename roles/{ldap => sram-ldap}/tasks/main.yml (99%) rename roles/{ldap => sram-ldap}/templates/ldap-backup.sh.j2 (100%) rename roles/{ldap => sram-ldap}/templates/ldap.conf.j2 (100%) rename roles/{ldap => sram-ldap}/templates/slapd.service.j2 (100%) rename roles/{ldap 
=> sram-ldap}/vars/main.yml (100%) create mode 100644 roles/sram-metadata/defaults/main.yml create mode 100644 roles/sram-metadata/files/01_idps.fd create mode 100644 roles/sram-metadata/files/02_backend.fd create mode 100644 roles/sram-metadata/files/03_frontend.fd create mode 100644 roles/sram-metadata/files/surf.png create mode 100644 roles/sram-metadata/files/surf.svg create mode 100644 roles/sram-metadata/files/surf_bimi.svg create mode 100644 roles/sram-metadata/files/surfconext.crt create mode 100644 roles/sram-metadata/files/transform.xslt create mode 100644 roles/sram-metadata/files/transform_proxy.xslt create mode 100644 roles/sram-metadata/handlers/main.yml create mode 100644 roles/sram-metadata/tasks/http.yml create mode 100644 roles/sram-metadata/tasks/main.yml create mode 100644 roles/sram-metadata/tasks/pyff.yml create mode 100644 roles/sram-metadata/templates/index.html.j2 create mode 100644 roles/sram-metadata/templates/pyff-metadata.service.j2 create mode 100644 roles/sram-metadata/templates/pyff-metadata.timer.j2 create mode 100644 roles/sram-metadata/templates/vhosts.conf.j2 rename roles/{plsc => sram-metadata}/vars/main.yml (100%) create mode 100644 roles/sram-midproxy/defaults/main.yml create mode 100644 roles/sram-midproxy/files/internal_attributes.yaml create mode 100644 roles/sram-midproxy/files/plugins/attribute-maps/basic.py create mode 100644 roles/sram-midproxy/files/plugins/backends/openid_backend.yaml create mode 100644 roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml create mode 100644 roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml create mode 100644 roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml create mode 100644 roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml create mode 100644 roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml create mode 100644 roles/sram-midproxy/files/proxy_conf.yaml create mode 100644 roles/sram-midproxy/tasks/main.yml 
rename roles/{plsc => sram-plsc}/defaults/main.yml (100%) rename roles/{plsc => sram-plsc}/handlers/main.yml (94%) rename roles/{plsc => sram-plsc}/tasks/main.yml (96%) rename roles/{plsc => sram-plsc}/templates/plsc.yml.j2 (100%) rename roles/{redis => sram-plsc}/vars/main.yml (100%) rename roles/{redis => sram-redis}/defaults/main.yml (67%) rename roles/{redis => sram-redis}/handlers/main.yml (83%) rename roles/{redis => sram-redis}/tasks/main.yml (98%) rename roles/{redis => sram-redis}/templates/redis.conf.j2 (100%) rename roles/{sbs => sram-redis}/vars/main.yml (100%) rename roles/{sbs => sram-sbs}/defaults/main.yml (99%) rename roles/{sbs => sram-sbs}/files/yarn.gpg (100%) rename roles/{sbs => sram-sbs}/handlers/main.yml (76%) rename roles/{sbs => sram-sbs}/tasks/main.yml (96%) rename roles/{sbs => sram-sbs}/templates/alembic.ini.j2 (100%) rename roles/{sbs => sram-sbs}/templates/config.yml.j2 (100%) rename roles/{sbs => sram-sbs}/templates/disclaimer.css.j2 (100%) rename roles/{sbs => sram-sbs}/templates/saml_advanced_settings.json.j2 (100%) rename roles/{sbs => sram-sbs}/templates/saml_settings.json.j2 (100%) rename roles/{sbs => sram-sbs}/templates/sbs-apache.conf.j2 (88%) rename roles/{sbs => sram-sbs}/templates/sbs.service.j2 (100%) create mode 100644 roles/sram-sbs/vars/main.yml diff --git a/roles/ldap/defaults/main.yml b/roles/sram-ldap/defaults/main.yml similarity index 95% rename from roles/ldap/defaults/main.yml rename to roles/sram-ldap/defaults/main.yml index 7d02ebe4a..e20f4553d 100644 --- a/roles/ldap/defaults/main.yml +++ b/roles/sram-ldap/defaults/main.yml @@ -3,7 +3,7 @@ ldap: "{{ ldap_defaults | combine(ldap_overrides, recursive=true) }}" ldap_defaults: image: "ghcr.io/surfscz/sram-ldap:main" - conf_dir: "{{ current_release_appdir }}/ldap" + conf_dir: "{{ current_release_appdir }}/sram/ldap" ldif_dir: "{{ ldap_defaults.conf_dir }}/schema" certs_dir: "{{ ldap_defaults.conf_dir }}/certs" backup_dir: "{{ ldap_defaults.conf_dir }}/ldap" diff 
--git a/roles/ldap/files/eduMember.ldif b/roles/sram-ldap/files/eduMember.ldif similarity index 100% rename from roles/ldap/files/eduMember.ldif rename to roles/sram-ldap/files/eduMember.ldif diff --git a/roles/ldap/files/eduPerson.ldif b/roles/sram-ldap/files/eduPerson.ldif similarity index 100% rename from roles/ldap/files/eduPerson.ldif rename to roles/sram-ldap/files/eduPerson.ldif diff --git a/roles/ldap/files/groupOfMembers.ldif b/roles/sram-ldap/files/groupOfMembers.ldif similarity index 100% rename from roles/ldap/files/groupOfMembers.ldif rename to roles/sram-ldap/files/groupOfMembers.ldif diff --git a/roles/ldap/files/ldap-add b/roles/sram-ldap/files/ldap-add similarity index 100% rename from roles/ldap/files/ldap-add rename to roles/sram-ldap/files/ldap-add diff --git a/roles/ldap/files/ldapPublicKey.ldif b/roles/sram-ldap/files/ldapPublicKey.ldif similarity index 100% rename from roles/ldap/files/ldapPublicKey.ldif rename to roles/sram-ldap/files/ldapPublicKey.ldif diff --git a/roles/ldap/files/logrotate_slapd b/roles/sram-ldap/files/logrotate_slapd similarity index 100% rename from roles/ldap/files/logrotate_slapd rename to roles/sram-ldap/files/logrotate_slapd diff --git a/roles/ldap/files/rsyslog_slapd.conf b/roles/sram-ldap/files/rsyslog_slapd.conf similarity index 100% rename from roles/ldap/files/rsyslog_slapd.conf rename to roles/sram-ldap/files/rsyslog_slapd.conf diff --git a/roles/ldap/files/sczGroup.ldif b/roles/sram-ldap/files/sczGroup.ldif similarity index 100% rename from roles/ldap/files/sczGroup.ldif rename to roles/sram-ldap/files/sczGroup.ldif diff --git a/roles/ldap/files/sramPerson.ldif b/roles/sram-ldap/files/sramPerson.ldif similarity index 100% rename from roles/ldap/files/sramPerson.ldif rename to roles/sram-ldap/files/sramPerson.ldif diff --git a/roles/ldap/files/voPerson.ldif b/roles/sram-ldap/files/voPerson.ldif similarity index 100% rename from roles/ldap/files/voPerson.ldif rename to roles/sram-ldap/files/voPerson.ldif diff 
--git a/roles/ldap/handlers/main.yml b/roles/sram-ldap/handlers/main.yml similarity index 100% rename from roles/ldap/handlers/main.yml rename to roles/sram-ldap/handlers/main.yml diff --git a/roles/ldap/tasks/admins.yml b/roles/sram-ldap/tasks/admins.yml similarity index 100% rename from roles/ldap/tasks/admins.yml rename to roles/sram-ldap/tasks/admins.yml diff --git a/roles/ldap/tasks/main.yml b/roles/sram-ldap/tasks/main.yml similarity index 99% rename from roles/ldap/tasks/main.yml rename to roles/sram-ldap/tasks/main.yml index f58c21ee7..97977e06a 100644 --- a/roles/ldap/tasks/main.yml +++ b/roles/sram-ldap/tasks/main.yml @@ -68,7 +68,7 @@ - name: Create the ldap container community.docker.docker_container: - name: "ldap" + name: "sram-ldap" image: "{{ ldap.image }}" restart_policy: "always" state: started @@ -112,7 +112,7 @@ - name: Ensure the schemas are added to LDAP ansible.builtin.shell: # For now the target side /opt/ldap is hard-coded - cmd: "docker exec ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" + cmd: "docker exec sram-ldap /opt/ldap/ldap-add /opt/ldap/schema/{{ item }}" register: "result" failed_when: "result.rc not in [0,80]" changed_when: "result.rc != 80" diff --git a/roles/ldap/templates/ldap-backup.sh.j2 b/roles/sram-ldap/templates/ldap-backup.sh.j2 similarity index 100% rename from roles/ldap/templates/ldap-backup.sh.j2 rename to roles/sram-ldap/templates/ldap-backup.sh.j2 diff --git a/roles/ldap/templates/ldap.conf.j2 b/roles/sram-ldap/templates/ldap.conf.j2 similarity index 100% rename from roles/ldap/templates/ldap.conf.j2 rename to roles/sram-ldap/templates/ldap.conf.j2 diff --git a/roles/ldap/templates/slapd.service.j2 b/roles/sram-ldap/templates/slapd.service.j2 similarity index 100% rename from roles/ldap/templates/slapd.service.j2 rename to roles/sram-ldap/templates/slapd.service.j2 diff --git a/roles/ldap/vars/main.yml b/roles/sram-ldap/vars/main.yml similarity index 100% rename from roles/ldap/vars/main.yml rename to 
roles/sram-ldap/vars/main.yml diff --git a/roles/sram-metadata/defaults/main.yml b/roles/sram-metadata/defaults/main.yml new file mode 100644 index 000000000..5355989bc --- /dev/null +++ b/roles/sram-metadata/defaults/main.yml @@ -0,0 +1,81 @@ +--- +metadata: "{{ metadata_defaults | combine(metadata_overrides, recursive=true) }}" + +metadata_defaults: + image_server: "ghcr.io/openconext/openconext-basecontainers/apache2:latest" + image_pyff: "ghcr.io/surfscz/sram-pyff:main" + hostname: "meta.{{ base_domain }}" + basedir: "{{current_release_appdir}}/sram/metadata" + + # server_name: "metadata-server" + + user: "sram-metadata" + group: "sram-metadata" + +# idps_source: "https://metadata.surfconext.nl/idps-metadata.xml" +# idps_cert: | +# -----BEGIN CERTIFICATE----- +# MIIEKjCCAhICEG12w6QqayYAWntxDN59dU0wDQYJKoZIhvcNAQELBQAwPDELMAkG +# A1UEBhMCTkwxEDAOBgNVBAoMB1NVUkZuZXQxGzAZBgNVBAMMElNVUkZjb25leHQg +# Um9vdCBDQTAeFw0xOTAxMTQxNjM5MDVaFw0yNDAxMTgxNjM5MDVaMGsxCzAJBgNV +# BAYTAk5MMRAwDgYDVQQIDAdVdHJlY2h0MRAwDgYDVQQKDAdTVVJGbmV0MRMwEQYD +# VQQLDApTVVJGY29uZXh0MSMwIQYDVQQDDBpTVVJGY29uZXh0IG1ldGFkYXRhIHNp +# Z25lcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMckFyqXzW7dbMt4 +# wDdSLaAjFAbNziUgQaivu4dl9Uf/cZ4f36a9DfQBUSraNoIR76ruwK3TPfFalemp +# xmWTsoVSQpb3AOsWbU+i0YKS1cmcqMUC1fef2j1IbuK4B4nEu9S5saGNVGNvUJ+Y +# jDUpC5vyyp7boW9E1md2jIBI6Mw+ZhlmkPucqaphxurWnm0KbxTZrYLOBZ1IXj6r +# yrRoFwwtjEH+CW8cRn8OATK0q4yb0BVr2gY2tp/lTpASHZ3WVWBK0prwK0KkusY6 +# ck+/vvlk46IdEr803NB0Dm3ECh3i65mfCaWzVTtd/md874paK+65f1JeVyd5I5al +# M2KEpvkCAwEAATANBgkqhkiG9w0BAQsFAAOCAgEAjvJXXkxOqh3K0k2NdDG5EOTy +# bA+koRbAqhdY/qJoSnqTzwBxJc6aPs+L4q2PIoLo0gNJj1Nm1taLusaaK+CBx3ar +# 1kxEika5FM0dqFjD3i7Y5U0FMeDB5cReo8TNdo31VGoY7CbRjtqHLRTuKzNmIfEm +# ahLnHIBtarE82b7Mpg0aLxjrRR+t8wSCriy+e9AEPzC5bWxtPJA+OhU8U9hMuOs5 +# SzKmHwYue4WY3q1rRaDpK3fqgXRDRfznNn9/RDDbBos7CRMSAPEmAO28qLKBW/1z +# a2TKQLddZ3uoCurFNbToSTueKYVEnveQNO2P5X6uy4rcYkjeSiwbmHo7jYuHAxx4 +# uGzHMpoqoGNx+2iYjtUo3dJUXzcZai3X+RuuMKXXvqGzrxJsoKayNVAE1dWoUHJl 
+# RouPhDLTdZq/pblORhFS8r10rKhSScgrNuN9LTTV7EPFeVr8trocNwl8IruH+eNL +# 6/7b5Y7fb7rvpxeHjWrTz8a9BXAIAv+bgyrg4OHGRcNIQb0XF438HD9r8Zb92B6Z +# VCR3aVS5496+1td+8aN/Blzo59LhKPiHyGZCPHFV/oBqG7nxp603kcWmJOcG+AgB +# 9bFiAimF5LLk/LnMfplK9w0vvxWVcdQkDgVPYvEGNtttj0QC7/jM4ZeihGb6Oyzy +# DZA6aeg73/ygOATQ13A= +# -----END CERTIFICATE----- + idps_filters: [] + + idps_files: + - name: "dummy-idp" + metadata: | + + + + + + + SRAM VM Dummy IdP + SRAM VM Dummy IdP + https://test-idp.sram.example.org/ + + + Administrator + mailto:sinterklaas@example.nl + + + +# idps_xrd: "{{metadata_defaults.basedir}}/certs/surfconext.xrd" +# idps_source_dir: "/opt/metadata-src" +# idps_feed: "{{ metadata_defaults.basedir }}/idps_feed.fd" +# idps_file: "idps.xml" +## +# proxy_frontend_source: "https://satosa.local/frontend.xml" +# proxy_frontend_feed: "{{ metadata_defaults.basedir }}/frontend_feed.fd" +# proxy_frontend_file: "proxy_idp.xml" +# +# proxy_backend_source: "https://satosa.local/metadata/backend.xml" +# proxy_backend_feed: "{{ metadata_defaults.basedir }}/backend_feed.fd" +# proxy_backend_file: "proxy_sp.xml" diff --git a/roles/sram-metadata/files/01_idps.fd b/roles/sram-metadata/files/01_idps.fd new file mode 100644 index 000000000..46d58b663 --- /dev/null +++ b/roles/sram-metadata/files/01_idps.fd @@ -0,0 +1,23 @@ +- load fail_on_error True: + #- "https://metadata.test.surfconext.nl/idps-metadata.xml verify certs/surfconext.crt" + - "https://metadata.test.surfconext.nl/idps-metadata.xml" + - "src/" +- select: + - "https://idp.diy.surfconext.nl/saml2/idp/metadata.php" + - "http://mock-idp" + - "https://login.test.eduid.nl" + - "https://idp-acc.surfnet.nl" + - "https://login.uaccess-a.leidenuniv.nl/nidp/saml2/metadata" + - "test-idp.lab.surf.nl" + - "https://test-idp.sram.surf.nl/saml/saml2/idp/metadata.php" + - "https://idp.ci-runner.sram.surf.nl/saml/saml2/idp/metadata.php" +- xslt: + stylesheet: "xslt/transform.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D +- sign: + key: 
"certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/idps.xml.new" +- stats diff --git a/roles/sram-metadata/files/02_backend.fd b/roles/sram-metadata/files/02_backend.fd new file mode 100644 index 000000000..698d615a4 --- /dev/null +++ b/roles/sram-metadata/files/02_backend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/backend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_sp.xml.new" +- stats diff --git a/roles/sram-metadata/files/03_frontend.fd b/roles/sram-metadata/files/03_frontend.fd new file mode 100644 index 000000000..252206d42 --- /dev/null +++ b/roles/sram-metadata/files/03_frontend.fd @@ -0,0 +1,14 @@ +- load fail_on_error True: + - "https://proxy.acc.sram.eduteams.org/metadata/frontend.xml" +- select +- xslt: + stylesheet: "xslt/transform_proxy.xslt" +- finalize: + cacheDuration: P7D + validUntil: P14D + name: "SURF Research Access Management" +- sign: + key: "certs/signing.key" + cert: "certs/signing.crt" +- publish: "out/proxy_idp.xml.new" +- stats diff --git a/roles/sram-metadata/files/surf.png b/roles/sram-metadata/files/surf.png new file mode 100644 index 0000000000000000000000000000000000000000..e2bc4a3c2b61cabcd3babac9f9b67ef8c860dfbc GIT binary patch literal 16016 zcmeHu2UJs8*XX4Ps5CzdA_6Lih%^Cd0fM41i~>3c7!j}}0VFhogiah$9BB#yhHga! zMT2w#2+~PFkP?!BfHZ*wX_5ZUW!}vH&3p5|nQ#67dh4yXzU5+h?>>9)b9OoV?7R2L zFQ+ZdL^kc(1VNC9`47i`f}jmd_)mx*Kw=HTIN(3t%O}l_Lu=tN%@yEnqt_28JOmX* z!hbx?^QCG4DClEuaYC?vvw*1FmeeWVb_m)7nIAuT)~{!RHF;giF6niDc-|uN@D_2E z4aPqFVRGWTdHRL;PwY|THQBQBZsloy0Xg-Z#7`Ib`Amrmd+7;i^I@~?ZwMXNUmY1w z(acQdbqZnSzjfu@Lv%h1XW2xP#6MC$K-*ae$ih6;%o>adI|mt5F$X( z?*JGT2K=9Ce`ofe(ynv=ZQkFR!PERXExfYd=KXWp-e(_&0?3X9fJOO;}fdPW#tF{DJnbt@yh}{&^w)6)XM? 
zA^zaTe`<+;QHcK^T~Q8%vEOv_*GBqtZ~23w|JlHQ!$|-3wEu>9@$aDb+d24Obwx!+ zMPx>H!IFEdgSPBipOhSrU&hDuwUIpy>eX8{c|$Z&Bcp&$GbBp4Pb>(0%_O?U2rl%|!RoXV--tD>m+S|xC7wQHcW;|A@g zCPD#Lp$gY6djd+GQXV|@iUM!~wU=xXijN2pMfIp!*7nX&f3I}YN?YenB2B;LQx9Sf z)2%R&>^IW9lgLIiX<0Az^7K1qPKLPRg^gpnb@_z2nh2p>g>eu80$n0HJy=ABKIX3iu%cL8zs5Q-yFtRVmy;qUvK4oHi$ zpLH9e$^zYmp`dt%4VdLlM+nvq0rhz&_$zd>ZnP}NY}wt(Ztz))!wG&a^_aGPzQo!Z z2d!SNS;@%B=kp?DqRtTnSJ5>q7k#a;ojp87;f&=6r0aywa);Q0WV)5~vQQ;o3T7-t za*r)j3|+o#b4JY|nZbN}8yX5(Cx zw`a#SW(9nESUmlz3?AbWzAA@b-)MF?FoW4iu4~G;XjLkL zZb)kMg`$TQ;s|ZhVF*IKZ;Uw>O?u%%)p0Q=h;<)I`LeMtsfXfe~%7@ru z+P#RA0>tSHXbe?azv1o@9~Ce-QF1L#$>+g#t{`eMRD&P7J9kpSmUXl!4#&M>_2Y(& zipH_=bTeKVSyZ#dAr+?asO-b)N_5G$X6QNFB87d)&5h2 z(H)1_TrEvrXqn0z7C5$>ccZc4Yxl$(mTzr_Wl|LQj2$fVXsy1Iqe1}hvz8?E^_p9$ zun^biyuO71@#_W55cRq^dxacGR_BM5v|8A|93*;i?$?w`nLruGbWnafxBC2?Y|TNJ zuEp$yP9H3MaZKl&cST(B7U=XVMmKw-!^N5v(at4V5gy{pm87>v2rp8$a}jSWd59Gl zqL*54&*6K}S&ovMC;6rqN8fE5^gU2Oh&zDt6wHvyDF!gz@vYGJn$i(fyfe6`;wl%7 z4o5)J*OOk!?R|92mRZpll?C~9x!ZP58W$02dH^ax6l_9=af zvJYWgl;}}fahM+p_^e=`i~-@Hd65rV9R?V@zRNtV683UXzbca_BghRrII1C-JfmWz zU^+XTJ3lmghf4yFZf^YGbHKQ0X@dZ?%H|QnU&b>sSG#nf(fBpQVZIl)wsWm;Xar<9 zIDb~0eSLLWgBJ>@#Y&2^L;Qh>f{9+-rp-{&n?1vrJRpBmd|-_+>OUn3E!Gul zoQ?>Ud@t*3)tq5IBDp*nT1bt04{hUeJ*RrN85h}MW@y4PTgL|F;dnhIyEenX&Ge`J z?4V5_DzR?YUBdA?EKRI>^~Vb9&vHF+ROzrlhXPYCj*k@P-kDnNs%OZZrEd}kD;iXj zao6mJv@A7gii45miIqv3sda7z$0%{w1HRQhcbjQgo6IG!JBgcon za}K|>$0TMy5e-r;jX5dA3QC*^Omp_Akq#^M;v_jsI#0(_#@(|niEyqk$N(n^bQ1EaQ6P7)Z5~rQ+X)XbazXDrn?^2wXpKTtS7ULnhy!N1PH( zns5?W*3aH&JyL#j>D#`;*B#`IT9$dnH1!%cG&=OHDDD+0MA?)U1m1Uz;FQE)Bjwj8 zr!C*EXg*eS(@TQw;FaL2AaCr+91#oYfEqG zQq9hpDInW9^rgs_yQojrI7msLHWI|yRg_g1HP5+gaW;FiIU^|KKn0~fTSmU;JMmB> zm-SA*PgZ}#=0IUxP~iKEJvR+GRlqpcO#=mcf>$HTz7$p^qNM!=zq)qD1j$}LfAoa| zdaiztv^0(~UHBD$Eg6$=E0<%mx>R1<^zzJz9jhg82iO+l1I1aPdQD|N=QmxqHDlgb zoO#k0p@Tb`nlH;nyhlEw)Vd-C=w{@C~8RXSxY>C~e9LwR>e! 
z!hkQKJ6E=Q*66ASK!B zh_zXGIkcx~S=lwB%)piVE-}I~{hCJO%X`R`8P~~X9q@oV2CRiPr@(lTA^WoOGCSnLn zT@Nw!J~KEqPZC57dWdnHoZ*>xBd4_8Ra$tbqhM_nDcAKGap;hz@#dm#%xa%CdZM&( zQM+s=%6HYh?~=1@lS!XB0U*XS2Y##NevsHTCQ9_m#3ma0`*Gjc9f)o%^a=G4f~rzq zBam%W_qvPJRCZFSy5XCsEdXd@KRbF_^T4H6%t@V#vY%)|P(SUGA;+d!H2D44ESK9R zE?R#V;DC^EksR|DLTjqsol0Th3VT7CCpt<_9BB|DEMB@urJQIg)8;oW!v8QR6GCG! zzgM+y?PM~@^_xwgyWiC0M#CG9uH|3%r7O^}OEu4`_f+3>Fx4%OTHT3Gu&Zr-<1p-h zMHo!`MihU6sW6VqTdmdSjszaJ(ri?`8L4@7a!xt8f2^aX*vqp`J6rjhn>avjt$lT1 zNhv$f{Zuj4=5=h@kJs*>sg0_rjgQipZB%cW?%cWS4NXBRk3S6IAk4)i6o-5=D{8cH zJWyP(8z(H~Z-b~S{8ou}t5s|mttr(7oDh!1o`UjE|kgP<@{z6QlqCr z_`986i*CDF#Q{+i$EaG;`5zbwA4R8QihP1Z1S*`Y=hdQqA+o z3nSN8{AkD-!N~U?H!bAw_v3p7I2dF z&HaJIRPJoG4C?Cn^m;8s_7<`+g;V?n+jVQkYLX8+a*xmAWv@q*?lOU=90j29o1J42#P zZ*K-eVvL&;9u*PxAESNNs=8fx^qXb4G=!%C^M*oOFuk#kqJAED|0@Ag=tNTCwW{Xs zi#%b7B5|Su3(G#R4GaY`mjx{R+pFYAw`<|K#1c&*5mfOz0pp^XaEn1%Z`_O1xFUuU z9}voRrbRdP3+e4K6J$CyvlNd3UcT&l-`a#|niB=85j5GPK4r~%C1-)nA}%2{!7eX= zQqSjm+s-PE*UC=v2Y*_|YWKtH>3kj^qhGJD=1#g{W-kTufmAWySHWLUouc7kmqU;7 zh_laP+Xw1ou=o3F5C&D9LZC7i%ma0fK#;+u{)_g-N&`($ZuwvjMLA zF{xjFSYxZ_DbMsRU*;l$s=MJimofgILWA%9!s1NXP}h#V=7L~jH`(=GA)^v~n2+AV;w8Yn&0{MdPxh5=BEXST?IFG){+d+F!tYHfjKACW$ z{H6I0`jW6}kcsxYKI@r?D@%B#Y`fFSDwy_~5>6HfMnsuwn%RcRTtq5G|ie+7@b7z&AgOswd zGs>GoI#JtUa4|U6d;)<&-__?KQQn1P$d<`e#m@hU95w7|sqcDfAwV%qP zDm5{`Pk)rmBo$uUNSVRMq90ybE&4`?e-jHgYzC8+G?S+FZW9G_z-_`3>$z-?McI=d29 z<88Y@zjXiXmwia*_G!7+&e~-MO-?6ImGoH1?snXCvqbDb>JTQxbJF*d6KpGp=b2|kj%Cj#%=z|% z@EtIyjl?85424ENLj+?eD@GLum(zfgcYuH~6uSBhv4WO6RmmTY#VJt;d*WamT9_HQ#3rF+=ly(PY+MgD@Fv`O~1BT0E+Md%UrX;&~4qx4%VXQ;t zkvA3~DpdC|zs%JIIoWMZQwyfweS73*TXOK_y}~O>smaS{E9Xh({ryJD zQ*K=oZs(USxhU$TGI~F313NUR9js%AS&xyXssZy=(M*dkY3LR%^xZ9jGN{z9P3Y$u z83oR@E*QB6<(m|5vU~fIdzIT~-PGKJ zOUK$dgz|5)my%sW$eq3Q@&|4MAM$f4m%YjfY`?_xxf8v4X;Et>Ag2h))EF!~a$6ur z({};XHvIg`D;h&J3@fRZEy4ra_nz<3zu2BzcXz2;qy{V3cOzoiB^F3l7mzF=nZKL! 
zQ-E~QiMofY%Zh+GJqzV$(JS^bNSy2p`Ht9}lSc+bUr zT<}d~W3!OV&WUVgjfRSu#`B0)$u$k$AAjMdlN%pDbM}qi)q9}K)o+RVE@S)qHqW8S zWN+1L9dBE*CX?F>Wyt(O1Q!M)C-3S|Wc+hALG7rD@}^_aOM#u=O(pwR&*jl>Ej`Dl zZ>hZ0tz>e>L(_HtY*O^c>w~#(9#A!?6`}xTz8Hj-u`?w)(rg`--5XKIQd0B9*`2ns zEk*>O&s)!z-hKD-yh5df>Dr%F&om+r!%5(29TomBuGO-hjQ6^wF8yatYd89gkVS#1 zoIVB>=>#{8m)vbx{O+yDEWL&jto){Db9*04JLL1__MYxy!{#PAnavAjx8~-13L0~y zid%QSdqdBTaBcz}wXr*G)rP2iFS!xrY&k>c*_>x{^Nhyw@uXs}U4>qDCnQ?%f}cZh z@sCcI4mkCf#K#2WOlnLTEMDzs=k)rP-v#mr5ft0re7Ebid=Nc7v&ASe-JY({-sctC zUPWzM7x&<&qVINojGq!LXO=6|8!(3g$#E7jvIZ5np(p3@g5vDcDKoa(9$N0Kwz|}j zX<~X!UT48bPQbEs84i<|cmyQHqBSJjZ%T~K?KSV)R>;rt`^&?ZUV(uyM6g%qv7$on zwD0b7VYLzR#hLy?E>vkE7KG7EqrSU8^*jo8|A0oM_m|v~+q;%j^=8PLc6gj}ANOj|A?5q6QkAQl&zF4B zr9Q5Sh(>pfM`?s!2*}^U2E^iQpI%oiDP$y}*s8XzZscA_3~W0VP2gx zebQ2D`Hu6#{_M_{2B*bGZj+_ez)kus!zn?P_`_x-=hVI#182d2_{@c8`NF*g^DjNc z?!Ab^O_T5YA*oHa+@ZDjNu7|1Smrw)^ou+=36h>0ms@QX(`-?w7{ZKab6Uu*hZPG| z%BO31%5UFnCO0&hIUG+N8cOXW_$x0l7%S{IaF#gsG*N{4Rj*Nbd^s&qWvJ>z2$kHC zfx}x(S-6Q^UOZp67TXk z$h7D0+nrVI4HJiM7IcM5sHEJSQHeW6{{H>Q_e=vLs_v6W(eGbZ9}2pM5_IiB6mLJA zsUrVrPu!_kK&w42n9dWLWNu2@jTZ51pRTKD{0c4AB3lZ)vE~KzkmZTa^yRhvH-$Cz zTNf7-a|%NjSa02j+CI@WK>lOtig)|4C+*;KlaF<4U;5oey{Z~4cKAvk zFi+TEP?fFaZuq<@2?2JiW}5W=IDBjVY^iSZ#sd)?qu!TlzMYJUZ?HCqb@-R3!lNQT z6Z=EP6Po=TzZO;q5?%#e;7o%^sec*AulMB%(XlXOD{id7e@Kcgm3X}=A*42S1hxXv z`Vd>Tdc|w8d^Pr`g|iSOvmL$*;JU(P>;N7GGS3K{RoiF(8G<)!T)KDj(yNN`Mi6N|&1Mfz z=foV!n7PT~y^vSXyGGin0^mKGW4ebI<}-9YBwUQwDQB^L&UDc}K5b8tRE0lRqZJH# z)5awzNvpN0;|RC$SP$Ggp!t&O{JvAw*ycHO-8L3COkKXgbi3_VKoG1RB|KQ8^LU-D9rx=~D%oRgr#l?aH zup`r^Z@xIOz<w$}shb-;HO4-?sv3`$h5TQnKN#*}GD3zJw~?V>fA*=5eSaA! zqUWD1n_V$@@xE-t$=6ufch`$lESesVw6f6!@K-^qOVG#ut=NPO!(X~6jYT~!bZo5M ziE^?+SCSaFjFoKKPFdMJqmcdq_XPepmMv>|dNWj=KpH-ABA2T_sFdzf+W4J#CJ;H! 
zi)?dH-nFv$Nz1w4@e8Nz02WAE_4r$OGa{c|-uR*<9AXV;HkNNHZJx1y&vC^a5@}4X zy%rdJFXH9!aGB^3y-br2uKrv%m8Nsbo8FD4EU(&Kxl>iA7hffA1|@oJ*8 zabE4Mx2Q^x!Ka2^VWY*qw5S14)XWYV%5IK`X($ayLkR$>9d_KxeHXe&VSd=(IxCd; zvoi`gJzhbJ5e&?i5IN3Ol?y+*W2`IE>J(3-a}A}?V|w2cWhKNAI@LVn>#2fJFuS5h z-I8iVgj?NB=c z$qj=k*aY+2C(07s6SmiOxY|k@e*Fm(^sec>lZL-nftBcTygqgK3`ZO{eG_@)Rse~# z9n>L@kS1~img(u(*l#!a9=n%b$y{{8*$uY<_lOLMBLpWtH`}hiJ zVTr2ozSrjTYM=Ta^uTsuBIwE=<^q> zS@{LMmgzJkzOIa~Ut>m)>TmacCJIhsvkx;W4u>I4E~lKSH7j=q1yS2nC+D?NlZvIc zjW54$4OqRFFhuEkT}aw-FOF+}9XUTq=T@}p))vUH1CJJnvq7s*qpw0cESG;-{fXJ6 zOnHSvCw*3fE|$v4&dK|F)-n~FFlMN#8*zNq1Nr_5WW z^*{bv)oplm##9pf&^vpLrazuTNnNc;Z$DGb2?;7v|H@9W(xlGH+JzBN*fZTHnoeN* z7B(K5w2Ucrkay1;?XJ0Gm+GtnIsjd0%GyV9_GjhlNR4_$SNXw0deE}$`_&a>_MkA5 zwt5NQi%quZH1t0(s0_;K(;gocl5v~Mae6KCPg+9A1@vt}kLE7fv!=4~>=uH(&T_h! zAZRr#i9fg*Bbkm|^9x0ccFC^22o-AhAo5kTX6B zmtx&9Tm0{%tvk-o`3rPVu&dL3nK!v`=VjlT+fHaU);aT-h=JaAD8SR1A5@30T>_1Z zwpvgm>%zFekDcp}7k}+xBzociESAq7^fj=vpM;^&%&52yfQz15{Ui>JIu}Xt2ysgy zBO#(=&n_Mz0=Q%`1Zos8EDFG`1u6+~d#YG`={%!Z`3!5Iry-`$f6%Styavs$#v;S* z#E=pV(DS%187QPMSuCHz7ht@7>?C6e0(i&lJMJ21$vU026N6}d4{GfZ|fCDJlY3SfUBDS#cils`Ai1e$v^ z-b}F&TOX&Z@)_(}|zTupE1KccHa3pZ-gM=&Gvxo=7JnRDKuVf58?NhMGyeE7}0 zfmI>Aa|vJTNjsWPxZIn8RmJ2L`NCST_LY<@8=%cLX{d3;`;^zYf<}Y3Q8cJ*&$ekv zjfaBpw3t5>BnbtKX>T2AB6{UEO?-hpz`3L%4!BV^`5v%fTfPt2psZip+a-L&|LrZ> zM-Unlt#Vi&lllBXuI2nM+!gTAnh{PTH;;bU4Vrc6;?PTA+E&*D$&+wgrcWFwJ|X}G zjEDNK@`rF-+M=l+kZ8*!m6ZRt3h& zKF>%VqD)D>@)ZDZDB}ayFF@Hv5Q2wWzLk?lq=}BTOY-1y20-^Xw;>~!rqT4_%zY5L z$jsc7nRQWPT}?ZC;TT-%RNI4qf_>7m@_{<@QIlQs6yO0Z8VMXg(8RUr-Ngq{@JZ&i zG9CO?l=7fHbiC9}D1)G?LF?`%gdwie*>-fa`;sugO7J$7bXNSlh5ZSg*N2YF-|hzm zjA#u;YBbi1`TY6AqiJoq=%V+@VnkC1;N-=J&$!JsO2 z@p7*)By%;6mA+Oe-^+UXk4#y$OSdKE!O8O$!3XY>&8iM_OLO9I& z%z~Rfqv>LVU9a4&o|5mZc^!)(O0c2UrZ6YxxG28BEY;5kqPsz1v$cy@$ZO?5)1ZA? 
zgoC3|k0FX;Kl1A6X`6B@jr>K>v^Fj`5us^PKb%PdW!J?d&LK1NpW>TN?cRG3G_=zT zmM{q(4$t;UkbZ%~V{ksxKG3X84h(G?Uui2Ryu8abe?9<*RiryNqU^B1OYekR@i6Lu z^wlY4eE5k?1W257y1f0AW+EG&L5hP9?9Hn6AJ(SAQsmBsq`6(m_(vis@OT_p;p%zz z2+cq9ADJlhW!4H2zV(~2yFHEyT+evB$BgxGCs7Rai=b`Jl=Pw6oiD2V6KEto@k4Cr zbjr7e;SVK<>qRu(Lxm^nZ0PebgRhH}6vADJFFw8YB#FdoS*Juuv= z3e&xt3;CKN`SN06AOlAESbHEjQ=-;*!)r}iZIV|64BIfg)OFiS3bNl=#tyw`j%SXlCWjj!;5*T0kRu@rHdUD ze_NnHrHHshz%!>rrv=D!)*0UO+gdhiTI3CgLLH>EWt`A94u~`*I*stSYhJqFW2G7d zwWs~8)D@VWmOe_4o(dL_#M+8?xxJkLy-2VjT=lvie}roaFW@TbdZvIK$^M{67(AE+ z2B+L=ZCSEZn4!Cmh7SK-=gh#Zusm}qz+Er+q2bLe^QF{Q!cW}N`UB^dP;)A3I6>J)ZcXWi4f#lM8egRmaZ4=t~!WTw& zz`93M=2p#2GF&WuLc!G-s8!Fm#Jc3x76T$YhArddirn}ag|9%uW9CgMyLdO`4FkU> z$~R1*uL4F&Zc<2ggRLYqs?5MB(ofiGH!&k%TIJ*|igX)o+k3YV&^>v%%%;=to^4@9 zywZSLFO?n`B0#Ew6tBkApI}}FQMdN%i`C0`2>6TzS%K!f9laOMp++HB_ z6zN+7Y47Q$`CGt7{A_J4m+^yd@n-Ni;}v>U;DO7p_zz6>9&C?VN5NkKC~k$BU`v0t zj*GK4$uLI9fqS!+5@R0x+v+E?VE$FzLW-@OLLhQY75~!CJb3Sw1`G}?k>aT-*5Cz^ zA;=ZSzs+U*f+YOBg>9Me=!2IIJg}_7idw%&1F|tVRRiS#z@}BKIu36FK+pqEH2*fN z8!U!6`$3D`LwJ(yS3v#*kBusW*bW(sfNT&TDE762jFk@>k^*Tb0m=w6Sw{m`Ydowr z0Q^stC~#);tml1D#zs*8hnWC>hmm=0LFZQq&taGjV}G>jh%pRH7dr&zy{pL7vi zL;0USLV)Kn?3Z7ke#3;fAV38rlYhDi4dQWt z1z;CY$WD3C&s_HmLt8!Xjo^SGDxDj9dgtU1>kPQ4v8Vhdzdar=0TsLEK_WjHdDK(n+B2HJs12 zt3pLS;4~z9{mgOH-?h$ctLkpB1@@$T{x|`y@IaNRQSj$NVie4#?}LzTY!>YDpw3g0 z7m0hY1z@lMh6qn$B+UQx)Di3Tg&OjG&64zT8L?bP2^Q%^YVwN)7rVedT2%3Ni`T`i zoEat^6bT?AQiNN0yYl@7?4>8osVHwj$msA9UeS7yk1$8}HqO2) zH06pU0)dKJaNM=S{Gy-t9U7nHUIrWeJ4ozBuUzHlqT!dHj}UqY2~l(NfpGK;dY^=t z%&S;g1tAFvy+Td(MR~4=&B{Jut29Yol_aV6d?0>7<(Erk>om{0hR>j7hSwOSPX<4{ zQWvgHZzGOEwMx+Si~8zWl|B~>c;hzNjuBt&dHVBdUVEZs+l@lrp?x}Zk{`~PSGdKx zDs=8^b7*u5tmrFeyN^CsTMrc(h?&bcq1n#|Vw5!GNMN1&Fj!41l7j3C+i}AqliyGX=h*VTjf)W^6 z)#_*!AFaHCC9f#@f_Fj=!%5_ZTvXo63nvYuw*udgjvHtY*Zq-H>jnp&(Z0t;I{eY< zR_+$VF18qmC^n@oA3h_5)`8b7tKJRb^iARR?{+W2OqR|+1s&m^x5c`a!r>?L2;MxN zk{68m^{r4*M&yQ}78xFL-gJ1379LDJTR^DK)_^M06ebBKpZVx)t1Q+vV224;n<$YR z1cOGkCcA{{@DHfLv9-vrX+C0ay)V5N0nd3KTO)fgs{c95*}4Iq=0chI+bo&L 
zeykzJMz~yf&=oUht}bNh7PGmlkw^AOQt#$w8X8tz;(inpI!|<1)D( zu&;=IBgWOunt8iHQb}aY1ay@tJDl4-33JpuS1`WpDlC!RtZZFj_Ya^n^XY)XAe8^u z%76hnDGLqj5y{AJ%9$=adV%PTC6$B$AAx!cT)RNF&RtQyO?pl{P6mMl;t3_aH!@E( zH{Eds_+aHD9&nSfbYh~CkaH^aj6jz4W>|x*&m2md^c#lzo)UIdBwjK#kJWz?`ICJ( zSSDm#5ptOKpJF;d|2?Mvd0hT2$ipYgder~z>{tf?0oOz7@3g-G{%}D2f%dNth(FN& zjl<=yh4{n41iS@As{iGOcJMU+f)MMdzs>tQGgw!Ds(`;C#6Q#iz7SYfe@^??Li~aD zudVpkLi~$X{2M~Ry83 + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surf_bimi.svg b/roles/sram-metadata/files/surf_bimi.svg new file mode 100644 index 000000000..f49b7a035 --- /dev/null +++ b/roles/sram-metadata/files/surf_bimi.svg @@ -0,0 +1,15 @@ + + + surf + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/surfconext.crt b/roles/sram-metadata/files/surfconext.crt new file mode 100644 index 000000000..0e8f074e8 --- /dev/null +++ b/roles/sram-metadata/files/surfconext.crt @@ -0,0 +1,3 @@ +-----BEGIN CERTIFICATE----- +MIIFbjCCA1agAwIBAgIQagXJvtKqIRRO8zD41OktRjANBgkqhkiG9w0BAQsFADB8MQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MSAwHgYDVQQDDBdTVVJGY29uZXh0IENBIDIwMjMgVEVTVDAeFw0yMzA2MDcxMTQxNDRaFw0yNTA2MDYxMTQxNDRaMIGVMQswCQYDVQQGEwJOTDEQMA4GA1UEBwwHVXRyZWNodDEQMA4GA1UECAwHVXRyZWNodDESMBAGA1UECgwJU1VSRiBCLlYuMRMwEQYDVQQLDApTVVJGY29uZXh0MTkwNwYDVQQDDDBTVVJGY29uZXh0IHRlc3QgZW52aXJvbm1lbnQgbWV0YWRhdGEgc2lnbmVyIDIwMjMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQC1Wj1MYwzY646Wa9td4zUZb5W27+cbARhNbIZsteUIPV6unxoO6tHCLJhRxC4pBTQsdrhfhh3+s5rwm8mjhJs2rciQkCdPiTl860jqihhWi5bFXyGX5o1U5mZgomUT+o7+nUj0et1l/kbFJ0GqIKtf0uPj7R/zqTpqeT0c6VFxchU6LA8GOI9w5XIISEGi/IWlDKyM69I3DmbCip/rm8u6kIQ0qqXh58lNNOsZw8WYokCXP0IPFQWpPkKC1VGYtivwKLzzvNxSGcuvp39ui+37hrdjqiTxK68Z48vJ6l+KsJP+jpDXYBYE/NsSVYez3vbVTB/l664yvBfKyGIHHDdTq5akLCQDgYQzjeNOU1oSZbcsub0k+osp7MFGkslYRhLb0V9tX0Xu+7jXzGthPUWicN0XdlHS0JOlSgOBftPn8kcqYNMF0IZVe6V/AVgfj4/4iDk3OKl9FRctFp3kSa8GzLIbjqmYXpGGIEse6U2gfqHS9WHu4odfKH7rhD3hZssCAwEAAaNSMFAwHQYDVR0OBBYEF
NclSgPTrGp4QJQZGjFu6VEBTX4PMB8GA1UdIwQYMBaAFI5kmzwW92s2rRY2B5NNjSYI2oj1MA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQsFAAOCAgEAORNL7FGBkeq6u/rmcNf+jZZz27vw86COPOiN6ygTyxaBq5fmJ4JZlDnlfO4C/4iek2QjKdgPlpvATGUUMXJdO6a7A3/vXNuoIGu3Ug9GW4vpTVPulaYZedPHC8zBsxwRKwxpSTda7ubWDxH3vUxHz/zDOD2O71O6KFj6Ph8JXwa3TLH0xRN5CXa0UMKX0S+ck8MahCYnMtd99EBL/uOr0+D4q2HwxDRDpL4I9yRwyWxCafoR+6OfzO/vc/SGcjEk/9s0DrMKDkDTJlE9eZbaaWFFCkAkg3LHHLMYjykcTvjDEV75OohYcEC5/6uKHcB/ZQjHwkPBqv9pUF897yZ7sxS66GEJmqqVIC+ayWRvC8N+UmvMGWAdohrY7r7CPeTE+iVHaeB7xGTSI9BhTEv3yMNHhqzqIOvgr8h5iCv7B5hQL+V7MRqD7e7X9uRR7wbyGmwT4p4VFbz5VqthCOFobsMxam9Axt+saebRyH6Mg3Ro9D5WgGoZmTP1yyiMrmEHQdf9+iblbfTbRW0irlaX5t58fWB1u4QZqcamlhVcl65Fub0g+QkSyGDMD9G57z3CKOluNy6TxFZOxMynY6CEtaozDaiETm7NaNC1lkhi+SOHKRX5+q0KqJdnEC7GOX69hSDsCT905dpVnr8JgFKoUfXWSmbwTMj45190dw7RMzk= +-----END CERTIFICATE----- diff --git a/roles/sram-metadata/files/transform.xslt b/roles/sram-metadata/files/transform.xslt new file mode 100644 index 000000000..80673b688 --- /dev/null +++ b/roles/sram-metadata/files/transform.xslt @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/roles/sram-metadata/files/transform_proxy.xslt b/roles/sram-metadata/files/transform_proxy.xslt new file mode 100644 index 000000000..6e6f55d09 --- /dev/null +++ b/roles/sram-metadata/files/transform_proxy.xslt @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + Security Response Team + mailto:securityincident@surf.nl + + + + + + + + + + + https://refeds.org/sirtfi2 + https://refeds.org/sirtfi + + + + + + diff --git a/roles/sram-metadata/handlers/main.yml b/roles/sram-metadata/handlers/main.yml new file mode 100644 index 000000000..2787eba12 --- /dev/null +++ b/roles/sram-metadata/handlers/main.yml @@ -0,0 +1,19 @@ +--- +# - name: "enable pyff-metadata job" +# systemd: +# name: "pyff-metadata.timer" +# enabled: true +# state: "started" +# daemon_reload: true + +# - name: "run pyff-metadata job" +# systemd: +# name: 
"pyff-metadata.service" +# state: "started" +# daemon_reload: true + +- name: Restart the pyFF container + community.docker.docker_container: + name: "sram-metadata-pyff" + restart: true + state: started diff --git a/roles/sram-metadata/tasks/http.yml b/roles/sram-metadata/tasks/http.yml new file mode 100644 index 000000000..befcc1d18 --- /dev/null +++ b/roles/sram-metadata/tasks/http.yml @@ -0,0 +1,48 @@ +--- +- name: "Install index page" + template: + src: "index.html.j2" + dest: "{{metadata.basedir}}/web/index.html" + mode: "0644" + +- name: "Install legacy link" + file: + src: "." + dest: "{{metadata.basedir}}/web/metadata" + state: "link" + +- name: "Install logos" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/web" + mode: "0644" + with_items: + - "surf.svg" + - "surf.png" + - "surf_bimi.svg" + +- name: "Create the metadata-server container" + community.docker.docker_container: + name: "sram-metadata-server" + image: "{{ metadata.image_server }}" + restart_policy: "always" + state: "started" + pull: true + mounts: + - source: "{{metadata.basedir}}/web" + target: "/var/www/html" + type: "bind" + read_only: true + networks: + - name: "loadbalancer" + labels: + traefik.http.routers.metadata.rule: "Host(`{{ metadata.hostname }}`)" + traefik.http.routers.metadata.tls: "true" + traefik.enable: "true" + healthcheck: + test: [ "CMD", "curl", "-fail", "http://localhost/" ] + interval: "10s" + timeout: "5s" + retries: 3 + start_period: "5s" + diff --git a/roles/sram-metadata/tasks/main.yml b/roles/sram-metadata/tasks/main.yml new file mode 100644 index 000000000..d6ac55f29 --- /dev/null +++ b/roles/sram-metadata/tasks/main.yml @@ -0,0 +1,49 @@ +--- +- name: "Create metadata group" + group: + name: "{{ metadata.group }}" + state: "present" + register: "result" + +- name: "Save metadata group gid" + set_fact: + metadata_group_gid: "{{ result.gid }}" + +- name: "Create metadata user" + user: + name: "{{ metadata.user }}" + group: "{{ metadata.group }}" + comment: 
"User to run metadata service" + shell: "/bin/false" + password: "!" + home: "{{ metadata.basedir }}" + create_home: false + state: "present" + register: "result" + +- name: "Save metadata user uid" + set_fact: + metadata_user_uid: "{{ result.uid }}" + + +- name: "Create metadata directories" + file: + path: "{{ item.dir }}" + state: "directory" + mode: "{{ item.mode }}" + owner: "root" + group: "{{ metadata.group }}" + with_items: + - { dir: "{{metadata.basedir}}/web", mode: "0775" } + - { dir: "{{metadata.basedir}}/feeds", mode: "0755" } + - { dir: "{{metadata.basedir}}/src", mode: "0755" } + - { dir: "{{metadata.basedir}}/certs", mode: "0755" } + - { dir: "{{metadata.basedir}}/xslt", mode: "0755" } + notify: "Restart the pyFF container" + + +- name: "Start pyff container" + include_tasks: "pyff.yml" + +- name: "Start http container" + include_tasks: "http.yml" diff --git a/roles/sram-metadata/tasks/pyff.yml b/roles/sram-metadata/tasks/pyff.yml new file mode 100644 index 000000000..6c66a5696 --- /dev/null +++ b/roles/sram-metadata/tasks/pyff.yml @@ -0,0 +1,106 @@ +--- +- name: "create self-signed Metadata Signing SSL certs" + shell: + cmd: ' + openssl genrsa -out "{{ metadata.basedir }}/certs/signing.key" 2048; + openssl req -new -nodes -x509 -subj "/C=NL/CN=signing" + -days 3650 -key "{{ metadata.basedir }}/certs/signing.key" + -out "{{ metadata.basedir }}/certs/signing.crt" -extensions v3_ca; + chown {{metadata.user}}:{{metadata.group}} {{ metadata.basedir }}/certs/*; + ' + creates: "{{ metadata.basedir }}/certs/signing.crt" + when: "metadata.signing_cert is not defined" + notify: "Restart the pyFF container" + +- name: "Write fixed Metadata signing certificates" + copy: + dest: "{{ metadata.basedir }}/certs/{{ item.file }}" + content: "{{item.contents}}" + mode: "{{item.mode}}" + owner: "{{metadata.user}}" + group: "{{metadata.group}}" + with_items: + - { file: "signing.key", mode: "0640", contents: "{{metadata.signing_cert.priv}}" } + - { file: "signing.crt", 
mode: "0644", contents: "{{metadata.signing_cert.pub}}" } + when: "metadata.signing_cert is defined" + notify: "Restart the pyFF container" + +- name: "Copy source certificates" + copy: + src: "{{ item }}" + dest: "{{ metadata.basedir }}/certs" + mode: "0644" + with_items: + - "surfconext.crt" + notify: "Restart the pyFF container" + +- name: "Install IdP metadata" + copy: + content: "{{item.metadata}}" + dest: "{{ metadata.basedir }}/src/{{item.name}}.xml" + mode: "0644" + with_items: "{{ metadata.idps_files }}" + notify: "Restart the pyFF container" + +- name: "Copy pyFF xslt transformations" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/xslt" + mode: "0644" + with_items: + - "transform_proxy.xslt" + - "transform.xslt" + notify: "Restart the pyFF container" + +- name: "Copy pyFF feeds" + copy: + src: "{{item}}" + dest: "{{metadata.basedir}}/feeds" + mode: "0644" + with_items: + - "01_idps.fd" + - "02_backend.fd" + - "03_frontend.fd" + notify: "Restart the pyFF container" + +- name: "Create the pyFF container" + community.docker.docker_container: + name: "sram-metadata-pyff" + image: "{{ metadata.image_pyff }}" + restart_policy: "always" + state: "started" + pull: true + init: true + env: + USER: "{{ metadata_user_uid | string }}" + GROUP: "{{ metadata_group_gid | string }}" + mounts: + - source: "{{ metadata.basedir }}/web" + target: "/opt/pyff/web" + type: "bind" + - source: "{{ metadata.basedir }}/feeds" + target: "/opt/pyff/feeds" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/src" + target: "/opt/pyff/src" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/certs" + target: "/opt/pyff/certs" + type: "bind" + read_only: true + - source: "{{ metadata.basedir }}/xslt" + target: "/opt/pyff/xslt" + type: "bind" + read_only: true + healthcheck: + test: + - "CMD" + - "bash" + - "-c" + - "[[ $(($(date +%s)-$(date -r /opt/pyff/web/idps.xml +%s))) -lt 400 ]]" + interval: "10s" + timeout: "5s" + retries: 3 + start_period: 
"5s" diff --git a/roles/sram-metadata/templates/index.html.j2 b/roles/sram-metadata/templates/index.html.j2 new file mode 100644 index 000000000..f0e40b22b --- /dev/null +++ b/roles/sram-metadata/templates/index.html.j2 @@ -0,0 +1,11 @@ + +SRAM + + +

SRAM metadata

+

SRAM IdP proxy metadata
+(for use by Service Providers)

+

SRAM SP proxy metadata
+(for use by Identity Providers)

+ + diff --git a/roles/sram-metadata/templates/pyff-metadata.service.j2 b/roles/sram-metadata/templates/pyff-metadata.service.j2 new file mode 100644 index 000000000..3df9cc6e8 --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.service.j2 @@ -0,0 +1,12 @@ +[Unit] +Description=pyFF Metadata processing +After=syslog.target network.target + +[Service] +Type=oneshot +WorkingDirectory={{metadata.basedir}} +ExecStart=echo "pyff-metadata" +SyslogIdentifier=pyff-metadata + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/pyff-metadata.timer.j2 b/roles/sram-metadata/templates/pyff-metadata.timer.j2 new file mode 100644 index 000000000..b1231af1f --- /dev/null +++ b/roles/sram-metadata/templates/pyff-metadata.timer.j2 @@ -0,0 +1,8 @@ +[Unit] +Description=Create Metadata timer + +[Timer] +OnCalendar=*:00 + +[Install] +WantedBy=multi-user.target diff --git a/roles/sram-metadata/templates/vhosts.conf.j2 b/roles/sram-metadata/templates/vhosts.conf.j2 new file mode 100644 index 000000000..be3733827 --- /dev/null +++ b/roles/sram-metadata/templates/vhosts.conf.j2 @@ -0,0 +1,15 @@ + + ServerName sram-metadata-server + DocumentRoot /var/www/html + Header always set Referrer-Policy "strict-origin-when-cross-origin" + Header always set X-Content-Type-Options "nosniff" + Header always set X-XSS-Protection "1; mode=block" + + Require all granted + + + Require all granted + Options FollowSymLinks + Options -MultiViews + + diff --git a/roles/plsc/vars/main.yml b/roles/sram-metadata/vars/main.yml similarity index 100% rename from roles/plsc/vars/main.yml rename to roles/sram-metadata/vars/main.yml diff --git a/roles/sram-midproxy/defaults/main.yml b/roles/sram-midproxy/defaults/main.yml new file mode 100644 index 000000000..3522fcb47 --- /dev/null +++ b/roles/sram-midproxy/defaults/main.yml @@ -0,0 +1,8 @@ +--- +midproxy: + satosa_version: 8 + state_encryption_key: 'secret' + issuer: 'issuer' + client_id: 'client' + client_secret: 'secret' + 
sp_metadata: 'eb-metadata.xml' diff --git a/roles/sram-midproxy/files/internal_attributes.yaml b/roles/sram-midproxy/files/internal_attributes.yaml new file mode 100644 index 000000000..eb3dcd66e --- /dev/null +++ b/roles/sram-midproxy/files/internal_attributes.yaml @@ -0,0 +1,22 @@ +attributes: + displayname: + openid: [name] + saml: [displayName] + givenname: + openid: [given_name] + saml: [givenName] + mail: + openid: [email] + saml: [mail] + name: + openid: [name] + saml: [cn] + surname: + openid: [family_name] + saml: [sn, surname] + uid: + openid: [sub] + saml: [uid] + schachomeorganization: + openid: [schac_home_organization] + saml: [schacHomeOrganization] diff --git a/roles/sram-midproxy/files/plugins/attribute-maps/basic.py b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py new file mode 100644 index 000000000..f98466df5 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/attribute-maps/basic.py @@ -0,0 +1,51 @@ +DEF = "urn:mace:dir:attribute-def:" +TERENA = "urn:mace:terena.org:attribute-def:" + +MAP = { + "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic", + "fro": { + f"{TERENA}schacHomeOrganization": "schacHomeOrganization", + f"{DEF}cn": "cn", + f"{DEF}displayName": "displayName", + f"{DEF}eduPersonAffiliation": "eduPersonAffiliation", + f"{DEF}eduPersonEntitlement": "eduPersonEntitlement", + f"{DEF}eduPersonPrincipalName": "eduPersonPrincipalName", + f"{DEF}eduPersonScopedAffiliation": "eduPersonScopedAffiliation", + f"{DEF}eduPersonTargetedID": "eduPersonTargetedID", + f"{DEF}eduPersonAssurance": "eduPersonAssurance", + f"{DEF}email": "email", + f"{DEF}emailAddress": "emailAddress", + f"{DEF}givenName": "givenName", + f"{DEF}gn": "gn", + f"{DEF}isMemberOf": "isMemberOf", + f"{DEF}mail": "mail", + f"{DEF}member": "member", + f"{DEF}name": "name", + f"{DEF}sn": "sn", + f"{DEF}surname": "surname", + f"{DEF}uid": "uid", + }, + "to": { + "schacHomeOrganization": f"{TERENA}schacHomeOrganization", + "cn": f"{DEF}cn", + 
"displayName": f"{DEF}displayName", + "eduPersonAffiliation": f"{DEF}eduPersonAffiliation", + "eduPersonEntitlement": f"{DEF}eduPersonEntitlement", + "eduPersonPrincipalName": f"{DEF}eduPersonPrincipalName", + "eduPersonScopedAffiliation": f"{DEF}eduPersonScopedAffiliation", + "eduPersonTargetedID": f"{DEF}eduPersonTargetedID", + "eduPersonAssurance": f"{DEF}eduPersonAssurance", + "eduPersonOrcid": f"{DEF}eduPersonOrcid", + "email": f"{DEF}email", + "emailAddress": f"{DEF}emailAddress", + "givenName": f"{DEF}givenName", + "gn": f"{DEF}gn", + "isMemberOf": f"{DEF}isMemberOf", + "mail": f"{DEF}mail", + "member": f"{DEF}member", + "name": f"{DEF}name", + "sn": f"{DEF}sn", + "surname": f"{DEF}surname", + "uid": f"{DEF}uid", + }, +} diff --git a/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml new file mode 100644 index 000000000..cb78fcccd --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/openid_backend.yaml @@ -0,0 +1,14 @@ +module: satosa.backends.openid_connect.OpenIDConnectBackend +name: myaccessid +config: + provider_metadata: + issuer: !ENV SATOSA_ISSUER + client: + verify_ssl: yes + auth_req_params: + response_type: code + scope: [openid, profile, email, schac_home_organization] + client_metadata: + client_id: !ENV SATOSA_CLIENT_ID + client_secret: !ENV SATOSA_CLIENT_SECRET + redirect_uris: [/] diff --git a/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml new file mode 100644 index 000000000..ed97d539c --- /dev/null +++ b/roles/sram-midproxy/files/plugins/backends/saml2_backend.yaml @@ -0,0 +1 @@ +--- diff --git a/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml new file mode 100644 index 000000000..c09b218b6 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/ping_frontend.yaml @@ -0,0 +1,3 @@ +module: 
satosa.frontends.ping.PingFrontend +name: ping +config: null diff --git a/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml new file mode 100644 index 000000000..1f8029b66 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/frontends/saml2_frontend.yaml @@ -0,0 +1,63 @@ +module: satosa.frontends.saml2.SAMLFrontend +name: idp +config: + #acr_mapping: + # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" + + endpoints: + single_sign_on_service: + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + + # If configured and not false or empty the common domain cookie _saml_idp will be set + # with or have appended the IdP used for authentication. The default is not to set the + # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie + # will be set to the value for the 'domain' key. If no 'domain' is set then the domain + # from the BASE defined for the proxy will be used. 
+ #common_domain_cookie: + # domain: .example.com + + entityid_endpoint: true + enable_metadata_reload: no + + idp_config: + organization: {display_name: SURF, name: SURF, url: 'https://www.surf.nl/'} + contact_person: + - {contact_type: technical, email_address: 'mailto:sram-beheer@surf.nl', given_name: Technical} + - {contact_type: support, email_address: 'mailto:sram-beheer@surf.nl', given_name: Support} + - {contact_type: other, email_address: 'mailto:sram-beheer@surf.nl', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} + key_file: frontend.key + cert_file: frontend.crt + metadata: + # remote: + # - url: https://engine.test2.surfconext.nl/authentication/sp/metadata + # cert: null + local: [!ENV SATOSA_SP_METADATA] + entityid: <base_url>/<name>/proxy.xml + accepted_time_diff: 60 + attribute_map_dir: plugins/attribute-maps + service: + idp: + endpoints: + single_sign_on_service: [] + name: Proxy IdP + ui_info: + display_name: + - lang: en + text: "MyAccessID proxy" + description: + - lang: en + text: "MyAccessID proxy" + keywords: + - lang: en + text: ["MyAccessID", "proxy"] + name_id_format: ['urn:oasis:names:tc:SAML:2.0:nameid-format:persistent', 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient'] + policy: + default: + fail_on_missing_requested: false + name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:basic + attribute_restrictions: null + lifetime: {minutes: 15} + encrypt_assertion: false + encrypted_advice_attributes: false diff --git a/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml new file mode 100644 index 000000000..86ac4e1f1 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/generate_attributes.yaml @@ -0,0 +1,8 @@ +module: satosa.micro_services.attribute_generation.AddSyntheticAttributes +name: AddSyntheticAttributes +config: + 
synthetic_attributes: + default: + default: + schachomeorganization: >- + {{ uid.scope }} diff --git a/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml new file mode 100644 index 000000000..e820311e7 --- /dev/null +++ b/roles/sram-midproxy/files/plugins/microservices/regex_attributes.yaml @@ -0,0 +1,10 @@ +module: satosa.micro_services.attribute_processor.AttributeProcessor +name: RegexAttributeProcessor +config: + process: + - attribute: uid + processors: + - name: RegexSubProcessor + module: satosa.micro_services.processors.regex_sub_processor + regex_sub_match_pattern: ^(.+)@.+$ + regex_sub_replace_pattern: \1 diff --git a/roles/sram-midproxy/files/proxy_conf.yaml b/roles/sram-midproxy/files/proxy_conf.yaml new file mode 100644 index 000000000..136268e61 --- /dev/null +++ b/roles/sram-midproxy/files/proxy_conf.yaml @@ -0,0 +1,74 @@ +# BASE: https://example.com +BASE: !ENV SATOSA_BASE + +COOKIE_STATE_NAME: "SATOSA_STATE" +CONTEXT_STATE_DELETE: yes +#STATE_ENCRYPTION_KEY: "asdASD123" + +cookies_samesite_compat: + - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"] + +INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + +BACKEND_MODULES: + - "plugins/backends/openid_backend.yaml" + +FRONTEND_MODULES: + - "plugins/frontends/saml2_frontend.yaml" + - "plugins/frontends/ping_frontend.yaml" + +MICRO_SERVICES: + - "plugins/microservices/generate_attributes.yaml" + - "plugins/microservices/regex_attributes.yaml" + +LOGGING: + version: 1 + formatters: + simple: + format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" + handlers: + stdout: + class: logging.StreamHandler + stream: "ext://sys.stdout" + level: INFO + formatter: simple + syslog: + class: logging.handlers.SysLogHandler + address: "/dev/log" + level: INFO + formatter: simple + debug_file: + class: logging.FileHandler + filename: satosa-debug.log + encoding: utf8 + level: INFO + formatter: simple + error_file: + 
class: logging.FileHandler + filename: satosa-error.log + encoding: utf8 + level: ERROR + formatter: simple + info_file: + class: logging.handlers.RotatingFileHandler + filename: satosa-info.log + encoding: utf8 + maxBytes: 10485760 # 10MB + backupCount: 20 + level: INFO + formatter: simple + loggers: + satosa: + level: INFO + saml2: + level: INFO + oidcendpoint: + level: INFO + pyop: + level: INFO + oic: + level: INFO + root: + level: INFO + handlers: + - stdout diff --git a/roles/sram-midproxy/tasks/main.yml b/roles/sram-midproxy/tasks/main.yml new file mode 100644 index 000000000..2962a6f08 --- /dev/null +++ b/roles/sram-midproxy/tasks/main.yml @@ -0,0 +1,59 @@ +--- +- name: Create directory to keep configfile + ansible.builtin.file: + dest: "/opt/sram/midproxy" + state: directory + owner: 1000 + group: 1000 + mode: "0770" + +- name: Copy EB SP metadata + ansible.builtin.copy: + src: "{{ inventory_dir }}/files/midproxy/{{ midproxy.sp_metadata }}" + dest: "/opt/sram/midproxy/{{ midproxy.sp_metadata }}" + owner: 1000 + group: 1000 + mode: "0740" + +- name: Copy SATOSA conf files + ansible.builtin.copy: + src: "{{ item }}" + dest: "/opt/sram/midproxy/{{ item }}" + owner: 1000 + group: 1000 + with_items: + - internal_attributes.yaml + - proxy_conf.yaml + - plugins/ + +- name: Create the SATOSA container + community.docker.docker_container: + name: midproxy + image: satosa:{{ midproxy.satosa_version }} + pull: true + restart_policy: "always" + state: started + restart: true + networks: + - name: "loadbalancer" + env: + SATOSA_BASE: 'https://midproxy.{{ openconextaccess_base_domain }}' + SATOSA_STATE_ENCRYPTION_KEY: '{{ midproxy_state_encryption_key }}' + SATOSA_SP_METADATA: '{{ midproxy.sp_metadata }}' + SATOSA_ISSUER: '{{ midproxy.issuer }}' + SATOSA_CLIENT_ID: '{{ midproxy_client_id }}' + SATOSA_CLIENT_SECRET: '{{ midproxy_client_secret }}' + volumes: + - /opt/sram/midproxy:/etc/satosa + labels: + traefik.http.routers.midproxy.rule: "Host(`midproxy.{{ 
openconextaccess_base_domain }}`)" + traefik.http.routers.midproxy.tls: "true" + traefik.enable: "true" + # curl is not available in the minimized satosa image + # so this healthcheck won't work + # healthcheck: + # test: ["CMD", "curl", "--fail" , "http://localhost" ] + # interval: 10s + # timeout: 10s + # retries: 3 + # start_period: 10s diff --git a/roles/plsc/defaults/main.yml b/roles/sram-plsc/defaults/main.yml similarity index 100% rename from roles/plsc/defaults/main.yml rename to roles/sram-plsc/defaults/main.yml diff --git a/roles/plsc/handlers/main.yml b/roles/sram-plsc/handlers/main.yml similarity index 94% rename from roles/plsc/handlers/main.yml rename to roles/sram-plsc/handlers/main.yml index 9ce03e899..70cbb3672 100644 --- a/roles/plsc/handlers/main.yml +++ b/roles/sram-plsc/handlers/main.yml @@ -13,6 +13,6 @@ - name: Restart the plsc container community.docker.docker_container: - name: "plsc" + name: sram-plsc restart: true state: started diff --git a/roles/plsc/tasks/main.yml b/roles/sram-plsc/tasks/main.yml similarity index 96% rename from roles/plsc/tasks/main.yml rename to roles/sram-plsc/tasks/main.yml index 14e7b40b7..0d3900bd9 100644 --- a/roles/plsc/tasks/main.yml +++ b/roles/sram-plsc/tasks/main.yml @@ -15,7 +15,7 @@ - name: Create the plsc container community.docker.docker_container: - name: "plsc" + name: "sram-plsc" image: "{{ plsc.image }}" restart_policy: "always" state: started diff --git a/roles/plsc/templates/plsc.yml.j2 b/roles/sram-plsc/templates/plsc.yml.j2 similarity index 100% rename from roles/plsc/templates/plsc.yml.j2 rename to roles/sram-plsc/templates/plsc.yml.j2 diff --git a/roles/redis/vars/main.yml b/roles/sram-plsc/vars/main.yml similarity index 100% rename from roles/redis/vars/main.yml rename to roles/sram-plsc/vars/main.yml diff --git a/roles/redis/defaults/main.yml b/roles/sram-redis/defaults/main.yml similarity index 67% rename from roles/redis/defaults/main.yml rename to roles/sram-redis/defaults/main.yml index
d4eb4b182..31d44935e 100644 --- a/roles/redis/defaults/main.yml +++ b/roles/sram-redis/defaults/main.yml @@ -2,8 +2,8 @@ redis: "{{ redis_defaults | combine(redis_overrides, recursive=true) }}" redis_defaults: image: "docker.io/library/redis:7" - conf_dir: "{{ current_release_appdir }}/redis" - data_dir: "{{ current_release_appdir }}/redis/data" + conf_dir: "{{ current_release_appdir }}/sram/redis" + data_dir: "{{ current_release_appdir }}/sram/redis/data" user: redis group: redis redis_user: default diff --git a/roles/redis/handlers/main.yml b/roles/sram-redis/handlers/main.yml similarity index 83% rename from roles/redis/handlers/main.yml rename to roles/sram-redis/handlers/main.yml index 5ed78e133..b08f0b62b 100644 --- a/roles/redis/handlers/main.yml +++ b/roles/sram-redis/handlers/main.yml @@ -1,6 +1,6 @@ --- - name: Restart redis container community.docker.docker_container: - name: redis + name: sram-redis state: started restart: true diff --git a/roles/redis/tasks/main.yml b/roles/sram-redis/tasks/main.yml similarity index 98% rename from roles/redis/tasks/main.yml rename to roles/sram-redis/tasks/main.yml index 65e7392ef..72789b08f 100644 --- a/roles/redis/tasks/main.yml +++ b/roles/sram-redis/tasks/main.yml @@ -47,7 +47,7 @@ - name: "Create redis container" community.docker.docker_container: - name: "redis" + name: "sram-redis" image: "{{ redis.image }}" restart_policy: "always" state: "started" diff --git a/roles/redis/templates/redis.conf.j2 b/roles/sram-redis/templates/redis.conf.j2 similarity index 100% rename from roles/redis/templates/redis.conf.j2 rename to roles/sram-redis/templates/redis.conf.j2 diff --git a/roles/sbs/vars/main.yml b/roles/sram-redis/vars/main.yml similarity index 100% rename from roles/sbs/vars/main.yml rename to roles/sram-redis/vars/main.yml diff --git a/roles/sbs/defaults/main.yml b/roles/sram-sbs/defaults/main.yml similarity index 99% rename from roles/sbs/defaults/main.yml rename to roles/sram-sbs/defaults/main.yml index 
aedcee959..8cbe8d109 100644 --- a/roles/sbs/defaults/main.yml +++ b/roles/sram-sbs/defaults/main.yml @@ -36,7 +36,7 @@ sbs_defaults: secret_key_suffix: suffix encryption_key: encryption_key - redis_host: redis + redis_host: sram-redis redis_port: 6379 redis_ssl: false redis_user: default diff --git a/roles/sbs/files/yarn.gpg b/roles/sram-sbs/files/yarn.gpg similarity index 100% rename from roles/sbs/files/yarn.gpg rename to roles/sram-sbs/files/yarn.gpg diff --git a/roles/sbs/handlers/main.yml b/roles/sram-sbs/handlers/main.yml similarity index 76% rename from roles/sbs/handlers/main.yml rename to roles/sram-sbs/handlers/main.yml index 2d7710d43..bc8be505b 100644 --- a/roles/sbs/handlers/main.yml +++ b/roles/sram-sbs/handlers/main.yml @@ -5,5 +5,5 @@ state: started restart: true loop: - - sbs-client - - sbs-server + - sram-sbs-client + - sram-sbs-server diff --git a/roles/sbs/tasks/main.yml b/roles/sram-sbs/tasks/main.yml similarity index 96% rename from roles/sbs/tasks/main.yml rename to roles/sram-sbs/tasks/main.yml index 560191f8a..6881736ec 100644 --- a/roles/sbs/tasks/main.yml +++ b/roles/sram-sbs/tasks/main.yml @@ -60,9 +60,9 @@ # Create dummy file in certs dir to pacify container pre-init script # https://github.com/SURFscz/SBS/pull/2312 - name: "Touch file in {{ sbs.cert_dir }}" - ansible.builtin.file: - path: "{{sbs.cert_dir}}/dummy" - state: file + ansible.builtin.copy: + content: "" + dest: "{{sbs.cert_dir}}/dummy" - name: "Create SBS config files" template: @@ -104,7 +104,7 @@ - name: "Run SBS migrations" throttle: 1 community.docker.docker_container: - name: "sbs-migration" + name: "sram-sbs-migration" image: "{{ sbs.server_image }}" pull: "never" state: "started" @@ -131,14 +131,14 @@ # will not save the output in result - name: "Remove migration container" community.docker.docker_container: - name: "sbs-migration" + name: "sram-sbs-migration" state: "absent" # TODO: fix this by only running this if "sbs_image is changed" changed_when: false - name: 
"Start sbs client container" community.docker.docker_container: - name: "sbs-client" + name: "sram-sbs-client" image: "{{ sbs.client_image }}" pull: "never" restart_policy: "always" @@ -155,7 +155,7 @@ - name: "Start SBS server container" community.docker.docker_container: - name: "sbs-server" + name: "sram-sbs-server" image: "{{ sbs.server_image }}" restart_policy: "always" state: "started" diff --git a/roles/sbs/templates/alembic.ini.j2 b/roles/sram-sbs/templates/alembic.ini.j2 similarity index 100% rename from roles/sbs/templates/alembic.ini.j2 rename to roles/sram-sbs/templates/alembic.ini.j2 diff --git a/roles/sbs/templates/config.yml.j2 b/roles/sram-sbs/templates/config.yml.j2 similarity index 100% rename from roles/sbs/templates/config.yml.j2 rename to roles/sram-sbs/templates/config.yml.j2 diff --git a/roles/sbs/templates/disclaimer.css.j2 b/roles/sram-sbs/templates/disclaimer.css.j2 similarity index 100% rename from roles/sbs/templates/disclaimer.css.j2 rename to roles/sram-sbs/templates/disclaimer.css.j2 diff --git a/roles/sbs/templates/saml_advanced_settings.json.j2 b/roles/sram-sbs/templates/saml_advanced_settings.json.j2 similarity index 100% rename from roles/sbs/templates/saml_advanced_settings.json.j2 rename to roles/sram-sbs/templates/saml_advanced_settings.json.j2 diff --git a/roles/sbs/templates/saml_settings.json.j2 b/roles/sram-sbs/templates/saml_settings.json.j2 similarity index 100% rename from roles/sbs/templates/saml_settings.json.j2 rename to roles/sram-sbs/templates/saml_settings.json.j2 diff --git a/roles/sbs/templates/sbs-apache.conf.j2 b/roles/sram-sbs/templates/sbs-apache.conf.j2 similarity index 88% rename from roles/sbs/templates/sbs-apache.conf.j2 rename to roles/sram-sbs/templates/sbs-apache.conf.j2 index 0743c2ddb..f0140a845 100644 --- a/roles/sbs/templates/sbs-apache.conf.j2 +++ b/roles/sram-sbs/templates/sbs-apache.conf.j2 @@ -12,10 +12,10 @@ RewriteCond %{DOCUMENT_ROOT}%{REQUEST_FILENAME} !-f RewriteRule ^/(.*)$ /index.html 
[L] ProxyRequests off -ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sbs-server:8080/ -ProxyPassReverse / http://sbs-server:8080/ -ProxyPass /socket.io/ ws://sbs-server:8080/socket.io/ -ProxyPassReverse /socket.io/ ws://sbs-server:8080/socket.io/ +ProxyPassMatch ^/(api|pam-weblogin|flasgger_static|swagger|health|config|info) http://sram-sbs-server:8080/ +ProxyPassReverse / http://sram-sbs-server:8080/ +ProxyPass /socket.io/ ws://sram-sbs-server:8080/socket.io/ +ProxyPassReverse /socket.io/ ws://sram-sbs-server:8080/socket.io/ Header set Cache-Control: "public, max-age=31536000, immutable" diff --git a/roles/sbs/templates/sbs.service.j2 b/roles/sram-sbs/templates/sbs.service.j2 similarity index 100% rename from roles/sbs/templates/sbs.service.j2 rename to roles/sram-sbs/templates/sbs.service.j2 diff --git a/roles/sram-sbs/vars/main.yml b/roles/sram-sbs/vars/main.yml new file mode 100644 index 000000000..761942f7b --- /dev/null +++ b/roles/sram-sbs/vars/main.yml @@ -0,0 +1 @@ +current_release_appdir: /opt/openconext From f6de5dca6ac9c27390bfc7019591ee4e4381b8fd Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Fri, 27 Mar 2026 16:45:19 +0100 Subject: [PATCH 7/7] Fix haproxy_backend.cfg.j2 --- roles/haproxy/templates/haproxy_backend.cfg.j2 | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/roles/haproxy/templates/haproxy_backend.cfg.j2 b/roles/haproxy/templates/haproxy_backend.cfg.j2 index ea52ac5b0..8ef005da4 100644 --- a/roles/haproxy/templates/haproxy_backend.cfg.j2 +++ b/roles/haproxy/templates/haproxy_backend.cfg.j2 @@ -81,5 +81,4 @@ backend ldap_servers {% for server in haproxy_ldap_servers -%} server {{server.label}} {{server.ip}}:{{server.port}} ssl verify none check weight 10 {% if loop.index==1 %}on-marked-up shutdown-backup-sessions{% else %}backup{% endif %} {% endfor %} -{% endfor %} -{% endig %} +{% endif %}