diff --git a/ansible/README.md b/ansible/README.md
new file mode 100644
index 0000000..c2c210d
--- /dev/null
+++ b/ansible/README.md
@@ -0,0 +1,30 @@
+# Deployment with Ansible
+
+Perform automated deployment of SwarmPAL-Processor to a host running Ubuntu 22.04.
+
+## Setup
+
+Launch an Ubuntu 22.04 instance with a cloud provider, using an SSH keypair without a passphrase.
+Add the following section to `~/.ssh/config`:
+```
+Host swarmpal-dev
+    Hostname <instance IP or hostname>
+    User ubuntu
+    IdentityFile ~/.ssh/<private key file>
+    IdentitiesOnly yes
+```
+
+In this directory, run:
+```bash
+$ ansible all -i inventory.yaml -m ping
+$ ansible-playbook -i inventory.yaml swarmpal_processor_dev.yaml --diff
+```
+
+## TODO
+
+Possible improvements:
+
+ * Create a separate user to keep configuration files.
+ * Support different Linux distributions.
+ * Use GitHub workflows to create the Docker image.
+ * Use an upstream Docker image.
diff --git a/ansible/inventory.yaml b/ansible/inventory.yaml
new file mode 100644
index 0000000..a1b4b6d
--- /dev/null
+++ b/ansible/inventory.yaml
@@ -0,0 +1,7 @@
+---
+all:
+  hosts:
+    swarmpal-dev:
+      ansible_host: swarmpal-dev
+      ansible_user: ubuntu
+      ansible_python_interpreter: /usr/bin/python3
diff --git a/ansible/roles/swarmpal/handlers/main.yaml b/ansible/roles/swarmpal/handlers/main.yaml
new file mode 100644
index 0000000..55215cb
--- /dev/null
+++ b/ansible/roles/swarmpal/handlers/main.yaml
@@ -0,0 +1,5 @@
+- name: Restart Dockerd
+  service:
+    name: docker
+    daemon_reload: true
+    state: restarted
diff --git a/ansible/roles/swarmpal/tasks/main.yaml b/ansible/roles/swarmpal/tasks/main.yaml
new file mode 100644
index 0000000..f6ecce4
--- /dev/null
+++ b/ansible/roles/swarmpal/tasks/main.yaml
@@ -0,0 +1,63 @@
+---
+- name: Update and upgrade apt packages
+  become: true
+  apt:
+    upgrade: yes
+    update_cache: yes
+
+- name: Install dependencies
+  apt:
+    name: "{{ item }}"
+  loop:
+    - docker.io
+    - docker-compose
+
+- name: Update Networking MTU
+  # See: https://www.civo.com/learn/fixing-networking-for-docker
+  lineinfile:
+    path: /lib/systemd/system/docker.service
+    regexp: "^ExecStart=/usr/bin/dockerd"
+    line: "ExecStart=/usr/bin/dockerd --mtu 1450 -H fd:// --containerd=/run/containerd/containerd.sock"
+  when: ansible_distribution == "Ubuntu"
+  notify: Restart Dockerd
+
+- name: Run handlers now, not after all tasks
+  meta: flush_handlers
+
+- name: Create the .env file
+  become: true
+  become_user: ubuntu
+  template:
+    src: env
+    dest: "/home/ubuntu/.env"
+    mode: "600"
+
+- name: "Create directory for nginx config"
+  become: true
+  become_user: ubuntu
+  file:
+    dest: "/home/ubuntu/nginx_conf.d"
+    state: directory
+
+- name: "Copy nginx config"
+  become: true
+  become_user: ubuntu
+  template:
+    src: nginx_default.conf
+    dest: "/home/ubuntu/nginx_conf.d/default.conf"
+
+- name: "Copy docker-compose.yaml"
+  become: true
+  become_user: ubuntu
+  template:
+    src: docker-compose.yaml
+    dest: "/home/ubuntu/docker-compose.yaml"
+
+- name: "docker-compose down"
+  docker_compose:
+    project_src: "/home/ubuntu"
+    state: absent
+
+- name: "docker-compose up"
+  docker_compose:
+    project_src: "/home/ubuntu"
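The `Create the .env file` task above renders the `env` template (included later in this diff), whose `lookup('env', ...)` calls read from the environment of the machine running Ansible, not the target host. A minimal deploy invocation, as a sketch with placeholder secret values:

```bash
# Export the secrets on the Ansible control machine; the env template
# reads them via lookup('env', ...) when rendering /home/ubuntu/.env.
export VIRES_TOKEN="<your VirES token>"   # placeholder
export FTP_SERVER="ftp.example.org"       # placeholder
export FTP_USERNAME="<username>"          # placeholder
export FTP_PASSWORD="<password>"          # placeholder

ansible-playbook -i inventory.yaml swarmpal_processor_dev.yaml --diff
```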
diff --git a/ansible/roles/swarmpal/templates/docker-compose.yaml b/ansible/roles/swarmpal/templates/docker-compose.yaml
new file mode 100644
index 0000000..e83af65
--- /dev/null
+++ b/ansible/roles/swarmpal/templates/docker-compose.yaml
@@ -0,0 +1,37 @@
+---
+version: '3.4'
+services:
+  swarmpal-processor:
+    image: ghcr.io/dawiedotcom/swarmpal-processor:test
+    networks:
+      - swarmpal-vlan
+    env_file: /home/ubuntu/.env
+    command: /app/start-dashboard.sh
+
+  nginx:
+    image: nginx
+    restart: always
+    depends_on:
+      - swarmpal-processor
+    networks:
+      - swarmpal-vlan
+    ports:
+      - "80:80"
+      - "443:443"
+    read_only: true
+    tmpfs:
+      - /run
+      - /tmp
+      - /var/cache/nginx
+    volumes:
+      - "/home/ubuntu/nginx_conf.d:/etc/nginx/conf.d"
+
+networks:
+  swarmpal-vlan:
+    driver: bridge
+    driver_opts:
+      com.docker.network.driver.mtu: 1450
+    ipam:
+      config:
+        - subnet: 172.22.0.0/16
+          gateway: 172.22.0.1
diff --git a/ansible/roles/swarmpal/templates/env b/ansible/roles/swarmpal/templates/env
new file mode 100644
index 0000000..e464688
--- /dev/null
+++ b/ansible/roles/swarmpal/templates/env
@@ -0,0 +1,4 @@
+VIRES_TOKEN={{ lookup('env', 'VIRES_TOKEN') }}
+FTP_SERVER={{ lookup('env', 'FTP_SERVER') }}
+FTP_USERNAME={{ lookup('env', 'FTP_USERNAME') }}
+FTP_PASSWORD={{ lookup('env', 'FTP_PASSWORD') }}
diff --git a/ansible/roles/swarmpal/templates/nginx_default.conf b/ansible/roles/swarmpal/templates/nginx_default.conf
new file mode 100644
index 0000000..915ba33
--- /dev/null
+++ b/ansible/roles/swarmpal/templates/nginx_default.conf
@@ -0,0 +1,25 @@
+
+map $http_upgrade $connection_upgrade {
+    default upgrade;
+    '' close;
+}
+
+server {
+    listen 80;
+    listen [::]:80;
+
+    server_name localhost;
+
+    location / {
+        proxy_pass http://swarmpal-processor:5006/;
+    }
+
+    location /app/ws {
+        proxy_pass http://swarmpal-processor:5006/app/ws;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection $connection_upgrade;
+        proxy_set_header Host $host;
+        proxy_set_header Origin "http://localhost:5006";
+    }
+}
diff --git a/ansible/swarmpal_processor_dev.yaml b/ansible/swarmpal_processor_dev.yaml
new file mode 100644
index 0000000..096f411
--- /dev/null
+++ b/ansible/swarmpal_processor_dev.yaml
@@ -0,0 +1,6 @@
+---
+- name: Set up a development instance of SwarmPAL-processor
+  hosts: swarmpal-dev
+  become: true
+  roles:
+    - swarmpal
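After the playbook completes, the stack can be smoke-tested from the control machine. A sketch, assuming the `swarmpal-dev` SSH alias from the README, the compose project the role installs under `/home/ubuntu`, and that `curl` and passwordless sudo are available on the instance (standard on Ubuntu cloud images):

```bash
# Both services should show as Up; nginx publishes port 80 and proxies
# to the dashboard on port 5006 inside the swarmpal-vlan network.
ssh swarmpal-dev 'cd /home/ubuntu && sudo docker-compose ps'
ssh swarmpal-dev 'curl -sI http://localhost/'
```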
diff --git a/tasks/fac-fast-processor.py b/tasks/fac-fast-processor.py
index 2ce8da1..6c86785 100644
--- a/tasks/fac-fast-processor.py
+++ b/tasks/fac-fast-processor.py
@@ -14,14 +14,16 @@
 # ---
 
 # %%
+import argparse
+import asyncio
 import datetime as dt
 import logging
 import os
 import re
 import sched
+import subprocess
 import sys
 import time
-from ftplib import FTP
 
 from dotenv import dotenv_values
 from swarmpal.toolboxes.fac.presets import fac_single_sat
@@ -52,6 +54,10 @@ def configure_logging():
 # %%
 def get_latest_evaluated(directory) -> "datetime":
     """Scan local directory to identify latest time in files"""
+
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+
     dir_contents = os.listdir(directory)
     product_naming = r"SW_(FAST|OPER)_FAC(A|B|C)TMS_2F_(\d{8}T\d{6})_(\d{8}T\d{6})_.{4}\.(cdf|CDF)"
     matched_files = [s for s in dir_contents if re.search(product_naming, s)]
@@ -67,10 +73,10 @@
 
 
 # %%
-def job(swarm_spacecraft="A", starting_time=None, output_directory="outputs", remote_directory=None, wait_time=WAIT_TIME):
+def job(swarm_spacecraft="A", starting_time=None, output_directory="outputs", wait_time=WAIT_TIME):
     collection_mag = f"SW_FAST_MAG{swarm_spacecraft}_LR_1B"
     # Check server for latest time in online products
-    LOGGER.info("Checking product availability...")
+    LOGGER.info(f"Checking product availability: {collection_mag}")
     t_latest_on_server = last_available_time(collection_mag).replace(microsecond=0)
     LOGGER.info(f"Latest availability for {collection_mag}: {t_latest_on_server}")
     # Check saved files for latest time evaluated
@@ -91,55 +97,47 @@
 
         # Prepare the next starting time to be the current end time
         t_latest_evaluated = t_end
        LOGGER.info(f"New data saved: {output_name}. Waiting to check again ({wait_time}s)")
-        # Upload the file to FTP
-        if remote_directory:
-            upload_to_ftp(output_name, remote_directory)
-            LOGGER.info(f"Waiting to check again ({wait_time}s)")
     else:
         LOGGER.info(f"No new data available. Waiting to check again ({wait_time}s)")
     # Schedule next job run
-    SCHEDULE.enter(wait_time, 1, job, (swarm_spacecraft, starting_time, output_directory, remote_directory, wait_time))
+    SCHEDULE.enter(wait_time, 1, job, (swarm_spacecraft, starting_time, output_directory, wait_time))
 
 
 # %%
-def get_ftp_server_credentials(env_file="../.env"):
-    env_vars = dotenv_values(env_file)
-    server = env_vars.get("FTP_SERVER")
-    username = env_vars.get("FTP_USERNAME")
-    password = env_vars.get("FTP_PASSWORD")
-    return {"server": server, "username":username, "password":password}
-
-
-def upload_to_ftp(local_file, remote_directory):
-    credentials = get_ftp_server_credentials()
-    try:
-        ftp = FTP(credentials["server"])
-        ftp.login(credentials["username"], credentials["password"])
-        ftp.cwd(remote_directory)
-        with open(local_file, "rb") as file:
-            ftp.storbinary("STOR " + local_file.split('/')[-1], file)
-        LOGGER.info(f"Successfully uploaded: {local_file} to remote: {remote_directory}")
-    except Exception as e:
-        LOGGER.error(f"Failed to upload {local_file} to remote: {remote_directory}\n{e}")
-        raise e
-    finally:
-        ftp.quit()
+def start_job(spacecraft, output_directory):
+    LOGGER.info(f"Beginning FAC FAST processor for Swarm {spacecraft}. Saving results to {output_directory}.")
+    # Begin 3 days ago if output_directory is empty
+    t0 = dt.datetime.now().date() - dt.timedelta(days=3)
+    SCHEDULE.enter(0, 1, job, (spacecraft, t0, output_directory, WAIT_TIME))
 
 
 # %%
-def main(spacecraft, output_directory, remote_directory):
-    LOGGER.info(f"Beginning FAC FAST processor for Swarm {spacecraft}")
-    # Begin 3 days ago if output_directory is empty
-    t0 = dt.datetime.now().date() - dt.timedelta(days=3)
-    SCHEDULE.enter(0, 1, job, (spacecraft, t0, output_directory, remote_directory, WAIT_TIME))
-    SCHEDULE.run()
+def main():
+    parser = argparse.ArgumentParser(
+        prog='fac-fast-processor.py',
+        description='Periodically process new Swarm FAST data with the FAC toolbox and sync outputs to a remote server'
+    )
+    parser.add_argument(
+        '-o', '--output-dir',
+        action='store',
+        default='outputs',
+        help='Location, on local disk, for output files'
+    )
+    parser.add_argument(
+        '-r', '--remote-dir',
+        action='store',
+        default='FAC/TMS',
+        help='Location, on remote server, to sync output files to'
+    )
+    args = parser.parse_args()
+
+    subprocess.Popen(['./inotifywait_rsync.sh', args.output_dir, args.remote_dir])
+
+    for sat in ['A', 'B', 'C']:
+        start_job(sat, os.path.join(args.output_dir, f'Sat_{sat}'))
+    SCHEDULE.run()
 
 
 if __name__ == "__main__":
-    if "get_ipython" in globals():
-        main(spacecraft="A", output_directory="outputs/Sat_A", remote_directory="FAC/TMS/Sat_A")
-    else:
-        if len(sys.argv) != 4:
-            print("Usage: python fac-fast-processor.py <spacecraft> <output_directory> <remote_directory>")
-        main(sys.argv[1], sys.argv[2], sys.argv[3])
+    main()
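With the argparse interface above, the processor no longer takes positional arguments; both options carry the defaults declared in the parser. A usage sketch (run from `tasks/`, since the script spawns `./inotifywait_rsync.sh` by relative path):

```bash
# Defaults: outputs/Sat_{A,B,C} on local disk, synced to FAC/TMS remotely.
python fac-fast-processor.py

# Or override both locations explicitly.
python fac-fast-processor.py --output-dir /data/fac --remote-dir FAC/TMS
```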
diff --git a/tasks/inotifywait_rsync.sh b/tasks/inotifywait_rsync.sh
new file mode 100755
index 0000000..15f4e72
--- /dev/null
+++ b/tasks/inotifywait_rsync.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# Create the watched directory if it doesn't exist;
+# rsync will fail otherwise.
+if [ ! -d "$1" ]; then
+    mkdir -p "$1"
+fi
+
+# Watch the local output directory and rsync it to
+# the remote output directory.
+while inotifywait -r "$1"; do
+    rsync -avz "$1" "$REMOTE_SERVER:$2"
+done
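The watcher depends on `inotifywait` from the `inotify-tools` package, and on a `REMOTE_SERVER` environment variable that nothing in this diff sets. A sketch of running it standalone, with a placeholder rsync-over-SSH target:

```bash
# inotifywait is provided by inotify-tools on Ubuntu.
sudo apt-get install -y inotify-tools

# REMOTE_SERVER is read by the script but defined nowhere in this diff;
# the value here is a placeholder.
export REMOTE_SERVER="ubuntu@example.org"
./inotifywait_rsync.sh outputs FAC/TMS
```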