30 changes: 30 additions & 0 deletions ansible/README.md
@@ -0,0 +1,30 @@
# Deployment with Ansible

Perform automated deployment of SwarmPAL-Processor to a host running Ubuntu 22.04.

## Setup

Launch an Ubuntu 22.04 instance with a cloud provider, using an SSH key pair that has no passphrase.
Add the following section to `~/.ssh/config`:
```
Host swarmpal-dev
Hostname <server IP or URL>
User ubuntu
IdentityFile ~/.ssh/<ssh private key>
IdentitiesOnly yes
```

In this directory, run:
```bash
$ ansible all -i inventory.yaml -m ping
$ ansible-playbook -i inventory.yaml swarmpal_processor_dev.yaml --diff
```
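
The `.env` file on the host is rendered from `roles/swarmpal/templates/env`, which reads credentials from the environment of the machine running Ansible. Export them before running the playbook. A minimal sketch, assuming an interactive shell (the values are placeholders):

```bash
export VIRES_TOKEN=<VirES access token>
export FTP_SERVER=<FTP host>
export FTP_USERNAME=<FTP user>
export FTP_PASSWORD=<FTP password>
```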

## TODO

Possible improvements:

* Create a separate user to keep configuration files.
* Support different Linux distributions.
* Use GitHub workflows to create the Docker image.
* Use an upstream Docker image.
7 changes: 7 additions & 0 deletions ansible/inventory.yaml
@@ -0,0 +1,7 @@
---
all:
  hosts:
    swarmpal-dev:
      ansible_host: swarmpal-dev
      ansible_user: ubuntu
      ansible_python_interpreter: /usr/bin/python3
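
To sanity-check that the inventory parses and targets the expected host, you can ask Ansible to print it (a quick verification step, not part of the deployment itself):

```bash
$ ansible-inventory -i inventory.yaml --graph
```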
5 changes: 5 additions & 0 deletions ansible/roles/swarmpal/handlers/main.yaml
@@ -0,0 +1,5 @@
---
- name: Restart Dockerd
  systemd:
    name: docker
    # Reload unit files first, since the MTU task edits docker.service
    daemon_reload: true
    state: restarted
63 changes: 63 additions & 0 deletions ansible/roles/swarmpal/tasks/main.yaml
@@ -0,0 +1,63 @@
---
- name: Update and upgrade apt packages
  become: true
  apt:
    upgrade: yes
    update_cache: yes

- name: Install dependencies
  apt:
    name: "{{ item }}"
  loop:
    - docker.io
    - docker-compose

- name: Update Networking MTU
  # See: https://www.civo.com/learn/fixing-networking-for-docker
  lineinfile:
    path: /lib/systemd/system/docker.service
    regexp: "^ExecStart=/usr/bin/dockerd"
    line: "ExecStart=/usr/bin/dockerd --mtu 1450 -H fd:// --containerd=/run/containerd/containerd.sock"
  when: ansible_distribution == "Ubuntu"
  notify: Restart Dockerd

- name: Run handlers now, not after all tasks
  meta: flush_handlers

- name: Create the .env file
  become: true
  become_user: ubuntu
  template:
    src: env
    dest: "/home/ubuntu/.env"
    mode: "600"

- name: Create directory for nginx config
  become: true
  become_user: ubuntu
  file:
    dest: "/home/ubuntu/nginx_conf.d"
    state: directory

- name: Copy nginx config
  become: true
  become_user: ubuntu
  template:
    src: nginx_default.conf
    dest: "/home/ubuntu/nginx_conf.d/default.conf"

- name: Copy docker-compose.yaml
  become: true
  become_user: ubuntu
  template:
    src: docker-compose.yaml
    dest: "/home/ubuntu/docker-compose.yaml"

- name: docker-compose down
  docker_compose:
    project_src: "/home/ubuntu"
    state: absent

- name: docker-compose up
  docker_compose:
    project_src: "/home/ubuntu"
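
The final two tasks tear the stack down and bring it back up on every run, so a deployment always recreates the containers. On the host this is roughly equivalent to the following, using the compose v1 CLI that the `docker_compose` module wraps:

```bash
$ cd /home/ubuntu
$ docker-compose down
$ docker-compose up -d
```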
37 changes: 37 additions & 0 deletions ansible/roles/swarmpal/templates/docker-compose.yaml
@@ -0,0 +1,37 @@
---
version: '3.4'
services:
  swarmpal-processor:
    image: ghcr.io/dawiedotcom/swarmpal-processor:test
    networks:
      - swarmpal-vlan
    env_file: /home/ubuntu/.env
    command: /app/start-dashboard.sh

  nginx:
    image: nginx
    restart: always
    depends_on:
      - swarmpal-processor
    networks:
      - swarmpal-vlan
    ports:
      - "80:80"
      - "443:443"
    read_only: true
    tmpfs:
      - /run
      - /tmp
      - /var/cache/nginx
    volumes:
      - "/home/ubuntu/nginx_conf.d:/etc/nginx/conf.d"

networks:
  swarmpal-vlan:
    driver: bridge
    driver_opts:
      com.docker.network.driver.mtu: 1450
    ipam:
      config:
        - subnet: 172.22.0.0/16
          gateway: 172.22.0.1
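
The MTU of 1450 on the bridge network mirrors the `--mtu 1450` flag that the Ansible role adds to dockerd; both are needed on cloud networks whose MTU is below the 1500 default. One way to confirm the option took effect, assuming the stack is up (the network name is an assumption: compose v1 prefixes it with the project directory name, here `ubuntu`):

```bash
$ docker network inspect ubuntu_swarmpal-vlan \
    --format '{{ index .Options "com.docker.network.driver.mtu" }}'
```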
4 changes: 4 additions & 0 deletions ansible/roles/swarmpal/templates/env
@@ -0,0 +1,4 @@
VIRES_TOKEN={{ lookup('env', 'VIRES_TOKEN') }}
FTP_SERVER={{ lookup('env', 'FTP_SERVER') }}
FTP_USERNAME={{ lookup('env', 'FTP_USERNAME') }}
FTP_PASSWORD={{ lookup('env', 'FTP_PASSWORD') }}
25 changes: 25 additions & 0 deletions ansible/roles/swarmpal/templates/nginx_default.conf
@@ -0,0 +1,25 @@

map $http_upgrade $connection_upgrade {
    default upgrade;
    '' close;
}

server {
    listen 80;
    listen [::]:80;

    server_name localhost;

    location / {
        proxy_pass http://swarmpal-processor:5006/;
    }

    location /app/ws {
        proxy_pass http://swarmpal-processor:5006/app/ws;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;
        proxy_set_header Host $host;
        proxy_set_header Origin "http://localhost:5006";
    }
}
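
The `/app/ws` location upgrades connections to WebSockets for the dashboard served on port 5006, while the `map` block closes connections that carry no `Upgrade` header. A hand-rolled probe of the upgrade path, assuming the stack is reachable on port 80 (the key is the standard sample nonce from RFC 6455):

```bash
$ curl -i http://<server IP or URL>/app/ws \
    -H "Connection: Upgrade" \
    -H "Upgrade: websocket" \
    -H "Sec-WebSocket-Version: 13" \
    -H "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ=="
```

A successful upgrade answers with `HTTP/1.1 101 Switching Protocols`.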
6 changes: 6 additions & 0 deletions ansible/swarmpal_processor_dev.yaml
@@ -0,0 +1,6 @@
---
- name: Setup a development instance of SwarmPAL-processor
  hosts: swarmpal-dev
  become: true
  roles:
    - swarmpal
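
After the play completes, a quick smoke test confirms the containers are running and nginx is answering (the `swarmpal-dev` alias comes from `~/.ssh/config`; curl needs the real server address, since the alias only exists for SSH):

```bash
$ ssh swarmpal-dev sudo docker ps --format '{{.Names}}'
$ curl -sI http://<server IP or URL>/ | head -n 1
```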
82 changes: 40 additions & 42 deletions tasks/fac-fast-processor.py
@@ -14,14 +16 @@
# ---

# %%
import argparse
import asyncio
import datetime as dt
import logging
import os
import re
import sched
import subprocess
import sys
import time
from ftplib import FTP

from dotenv import dotenv_values
from swarmpal.toolboxes.fac.presets import fac_single_sat
@@ -52,6 +54,10 @@ def configure_logging():
# %%
def get_latest_evaluated(directory) -> "datetime":
"""Scan local directory to identify latest time in files"""

if not os.path.exists(directory):
os.makedirs(directory)

dir_contents = os.listdir(directory)
product_naming = r"SW_(FAST|OPER)_FAC(A|B|C)TMS_2F_(\d{8}T\d{6})_(\d{8}T\d{6})_.{4}\.(cdf|CDF)"
matched_files = [s for s in dir_contents if re.search(product_naming, s)]
@@ -67,10 +73,10 @@ def get_latest_evaluated(directory) -> "datetime":


# %%
def job(swarm_spacecraft="A", starting_time=None, output_directory="outputs", remote_directory=None, wait_time=WAIT_TIME):
def job(swarm_spacecraft="A", starting_time=None, output_directory="outputs", wait_time=WAIT_TIME):
    collection_mag = f"SW_FAST_MAG{swarm_spacecraft}_LR_1B"
    # Check server for latest time in online products
    LOGGER.info("Checking product availability...")
    LOGGER.info(f"Checking product availability: {collection_mag}")
    t_latest_on_server = last_available_time(collection_mag).replace(microsecond=0)
    LOGGER.info(f"Latest availability for {collection_mag}: {t_latest_on_server}")
@@ -91,55 +97,47 @@ def job(swarm_spacecraft="A", starting_time=None, output_directory="outputs", re
        # Prepare the next starting time to be the current end time
        t_latest_evaluated = t_end
        LOGGER.info(f"New data saved: {output_name}. Waiting to check again ({wait_time}s)")
        # Upload the file to FTP
        if remote_directory:
            upload_to_ftp(output_name, remote_directory)
            LOGGER.info(f"Waiting to check again ({wait_time}s)")
    else:
        LOGGER.info(f"No new data available. Waiting to check again ({wait_time}s)")

    # Schedule next job run
    SCHEDULE.enter(wait_time, 1, job, (swarm_spacecraft, starting_time, output_directory, remote_directory, wait_time))
    SCHEDULE.enter(wait_time, 1, job, (swarm_spacecraft, starting_time, output_directory, wait_time))


# %%
def get_ftp_server_credentials(env_file="../.env"):
    env_vars = dotenv_values(env_file)
    server = env_vars.get("FTP_SERVER")
    username = env_vars.get("FTP_USERNAME")
    password = env_vars.get("FTP_PASSWORD")
    return {"server": server, "username": username, "password": password}


def upload_to_ftp(local_file, remote_directory):
    credentials = get_ftp_server_credentials()
    try:
        ftp = FTP(credentials["server"])
        ftp.login(credentials["username"], credentials["password"])
        ftp.cwd(remote_directory)
        with open(local_file, "rb") as file:
            ftp.storbinary("STOR " + local_file.split('/')[-1], file)
        LOGGER.info(f"Successfully uploaded: {local_file} to remote: {remote_directory}")
    except Exception as e:
        LOGGER.error(f"Failed to upload {local_file} to remote: {remote_directory}\n{e}")
        raise e
    finally:
        ftp.quit()
def start_job(spacecraft, output_directory):
    LOGGER.info(f"Beginning FAC FAST processor for Swarm {spacecraft}. Saving results to {output_directory}.")
    # Begin 3 days ago if output_directory is empty
    t0 = dt.datetime.now().date() - dt.timedelta(days=3)
    SCHEDULE.enter(0, 1, job, (spacecraft, t0, output_directory, WAIT_TIME))


# %%
def main(spacecraft, output_directory, remote_directory):
    LOGGER.info(f"Beginning FAC FAST processor for Swarm {spacecraft}")
    # Begin 3 days ago if output_directory is empty
    t0 = dt.datetime.now().date() - dt.timedelta(days=3)
    SCHEDULE.enter(0, 1, job, (spacecraft, t0, output_directory, remote_directory, WAIT_TIME))
    SCHEDULE.run()
def main():
    parser = argparse.ArgumentParser(
        prog='fac-fast-processor.py',
        description='...'  # TODO
    )
    parser.add_argument(
        '-o', '--output-dir',
        action='store',
        default='outputs',
        help='Location, on local disk, for output files'
    )
    parser.add_argument(
        '-r', '--remote-dir',
        action='store',
        default='FAC/TMS',
        help='Location, on remote server, to sync output files to'
    )
    args = parser.parse_args()

    subprocess.Popen(['./inotifywait_rsync.sh', args.output_dir, args.remote_dir])

    for sat in ['A', 'B', 'C']:
        start_job(sat, os.path.join(args.output_dir, f'Sat_{sat}'))

    SCHEDULE.run()

if __name__ == "__main__":
    if "get_ipython" in globals():
        main(spacecraft="A", output_directory="outputs/Sat_A", remote_directory="FAC/TMS/Sat_A")
    else:
        if len(sys.argv) != 4:
            print("Usage: python fac-fast-processor.py <spacecraft-letter> <output-dir> <remote-directory>")
        main(sys.argv[1], sys.argv[2], sys.argv[3])
    main()
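
With the argparse interface, a single invocation now schedules jobs for all three spacecraft and spawns the rsync watcher; the old positional arguments are gone. Typical usage, with the defaults defined above:

```bash
$ python fac-fast-processor.py --output-dir outputs --remote-dir FAC/TMS
```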
13 changes: 13 additions & 0 deletions tasks/inotifywait_rsync.sh
@@ -0,0 +1,13 @@
#!/bin/bash

# Create watched directories if they don't exist.
# rsync will fail otherwise.
if [ ! -d "$1" ]; then
    mkdir -p "$1"
fi

# Watch the local output directory and rsync with
# the remote output directory.
while inotifywait -r "$1"; do
    rsync -avz "$1" "$REMOTE_SERVER:$2"
done
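
The script watches `$1` and syncs it to `$2` on the host named by the `REMOTE_SERVER` environment variable, which must therefore be set in the processor's environment; `inotifywait` comes from the `inotify-tools` package. A sketch of running it standalone (the host is a placeholder):

```bash
$ sudo apt install inotify-tools
$ REMOTE_SERVER=user@ftp.example.org ./inotifywait_rsync.sh outputs FAC/TMS
```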