Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,5 @@ venv

config.py

stored_procedures
stored_procedures
.vs/
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,11 @@ def set_id_in_csv_with_asset_id(asset_table: Table, assets_from_csv: list[dict])
if asset_id is None:
asset_item["failed_reason"] = f"Invalid assetId: {asset_item['prop_ref']}. "
continue
results = get_by_secondary_index(asset_table, "AssetId", "assetId", asset_id)
results = get_by_secondary_index(
asset_table, "AssetId", "assetId", asset_id)
if len(results) > 1:
asset_item["failed_reason"] = f"Multiple assets found for assetId {asset_item['prop_ref']}. "
asset_item[
"failed_reason"] = f"Multiple assets found for assetId {asset_item['prop_ref']}. "
continue
elif len(results) == 0:
asset_item["failed_reason"] = f"No assets found for assetId {asset_item['prop_ref']}. "
Expand All @@ -79,4 +81,5 @@ def main():
headings = asset_with_ids[0].keys()
f.write("\t".join(headings) + "\n")
for asset in asset_with_ids:
f.write("\t".join([str(asset[heading]) for heading in headings]) + "\n")
f.write("\t".join([str(asset[heading])
for heading in headings]) + "\n")
103 changes: 103 additions & 0 deletions aws/database/dynamodb/scripts/asset_table/update_property_patch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from dataclasses import dataclass

from mypy_boto3_dynamodb.service_resource import Table

from aws.database.dynamodb.utils.get_by_secondary_index import get_by_secondary_index
from aws.database.dynamodb.utils.get_dynamodb_table import get_dynamodb_table
from aws.utils.csv_to_dict_list import csv_to_dict_list
from aws.utils.logger import Logger
from enums.enums import Stage
from aws.utils.progress_bar import ProgressBar
from aws.database.domain.dynamo_domain_objects import Asset

import requests
import os
import re
from dotenv import load_dotenv
from utils.confirm import confirm


load_dotenv()


@dataclass
class Config:
    """Constants for the property-patch update script.

    NOTE(review): none of these attributes carry a type annotation, so
    @dataclass generates no fields here - these remain plain class
    attributes and the class is only ever used as a namespace
    (Config.TABLE_NAME etc.), never instantiated.
    """
    TABLE_NAME = "Assets"  # DynamoDB table holding asset records
    OUTPUT_CLASS = Asset  # domain class for rows of that table
    LOGGER = Logger()  # shared logger used by the update functions below
    STAGE = Stage.HOUSING_STAGING  # target environment for the run
    ITEM_COUNT_LIMIT = 10  # Set to None to return all items (appears unused in this file - confirm)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Note you're not using this



def clean_asset_id(asset_id: str) -> str | None:
asset_id = str(asset_id)
asset_id = asset_id.replace(" ", "")
if len(asset_id) < 8:
# pad with zeros
asset_id = asset_id.zfill(8)
if asset_id.isnumeric():
if len(asset_id) > 8:
return None
propref_regex = re.compile(r"^([0-9]{8})$")
asset_id = propref_regex.findall(asset_id)[0]
return asset_id
return None


def update_areaid_patchid_in_asset(assets_table: Table, patch_from_csv: list[dict], logger: Logger):
    """Stamp patchId/areaId onto assets referenced by the CSV rows.

    For each CSV row, cleans the prop_ref, looks the asset up via the
    AssetId secondary index, finds the patch whose name matches the row's
    "name" column in the patches-and-areas API response, writes the
    patch's id/parentId onto the asset and saves it back to DynamoDB.

    :param assets_table: DynamoDB Assets table.
    :param patch_from_csv: rows with at least "prop_ref" and "name" keys.
    :param logger: logger for per-row progress / failure messages.
    """
    # get all patches and areas endpoint
    url = os.getenv('GETALLPATCHESANDAREAAPIURL')
    token = os.getenv('AUTH_TOKEN')
    if url is None or token is None:
        # Previously the fetch was skipped silently and the loop below
        # then raised NameError on the undefined patch list.
        logger.log(
            "GETALLPATCHESANDAREAAPIURL and/or AUTH_TOKEN not set; aborting.")
        return
    response = requests.get(url, headers={'Authorization': token})
    all_patches_and_areas = response.json()

    progress_bar = ProgressBar(len(patch_from_csv))
    # get asset record based on prop ref from csv
    for i, item in enumerate(patch_from_csv):
        if i % 10 == 0:
            progress_bar.display(i)
        asset_id = clean_asset_id(item["prop_ref"])
        if asset_id is None:
            logger.log(f"Invalid assetId: {item['prop_ref']}. ")
            continue
        results = get_by_secondary_index(
            assets_table, "AssetId", "assetId", asset_id)
        if len(results) > 1:
            logger.log(
                f"Multiple assets found for assetId {item['prop_ref']}. ")
            continue
        elif len(results) == 0:
            logger.log(f"No assets found for assetId {item['prop_ref']}. ")
            continue
        asset = results[0]
        patchName = item["name"]
        if not patchName:
            logger.log(f'patchName is not given for propertyRef {asset_id}')
            continue
        # Get the patch object based on the patch name given in the csv.
        patch = next(
            (p for p in all_patches_and_areas if p.get('name') == patchName),
            None)
        if patch is None:
            # Previously the asset was logged and re-written even when no
            # patch matched, which could raise KeyError in the log line
            # below if the asset had no existing areaId/patchId keys.
            logger.log(
                f'No patch named {patchName} found for prop_ref {asset_id}')
            continue
        # .get returns None when the key is missing instead of raising
        # (per reviewer suggestion).
        asset["patchId"] = patch.get("id")
        asset["areaId"] = patch.get("parentId")

        logger.log(
            f'updating areaid to {asset["areaId"]} and patchId to {asset["patchId"]} for prop_ref {asset_id}')
        assets_table.put_item(Item=asset)
        logger.log(f"UPDATED {asset['id']}")


def update_property_patch():
    """Entry point: load the patch CSV and, after user confirmation,
    apply patch/area updates to the Assets table for the configured stage."""
    # NOTE(review): "Propert" looks like a typo, but this path must match
    # the actual file on disk - confirm before renaming.
    csv_path = r"/workspaces/mtfh-scripts/aws/Propert Patch List for script STAGING.csv"
    rows = csv_to_dict_list(csv_path)
    assets_table = get_dynamodb_table(Config.TABLE_NAME, Config.STAGE)

    prompt = f"Are you sure you want to update property patch in {Config.STAGE.to_env_name()}?"
    if confirm(prompt):
        update_areaid_patchid_in_asset(assets_table, rows, Config.LOGGER)


# Allow running this script directly.
if __name__ == "__main__":
    update_property_patch()
31 changes: 31 additions & 0 deletions aws/database/opensearch/housing_search/dummy-data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import json
from elasticsearch import Elasticsearch


# Dummy asset records to index, loaded from assets.json at import time.
DUMB_ITEMS = []
with open("assets.json", "r") as source_file:
    DUMB_ITEMS = json.load(source_file)


def elastic_search():
    """Reset the local 'assets' index and load the dummy records into it.

    Connects to a local single-node Elasticsearch, creates the index if
    missing, deletes all existing documents, then indexes DUMB_ITEMS.
    """
    index_name = "assets"
    # create an elasticsearch client (local node, default port)
    client = Elasticsearch([{"host": "localhost", "port": 9200}])

    # check connection
    if not client.ping():
        raise ValueError("Connection failed")

    # create index if it does not exist
    if not client.indices.exists(index_name):
        client.indices.create(index=index_name)

    # clear all data in the index so only the dummy data remains
    client.delete_by_query(index=index_name, body={"query": {"match_all": {}}})

    # index each dummy record under its own id
    for record in DUMB_ITEMS:
        client.index(index=index_name, id=record["id"], body=record)


# Allow running this script directly.
if __name__ == "__main__":
    elastic_search()
57 changes: 57 additions & 0 deletions aws/database/rds/repairs/BonusCalc/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
.ONESHELL:
# Requires AWS CLI Profile matching housing-${ENVIRONMENT} to be set up
# Requires AWS Session Manager Plugin to be installed:
# https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html
# On Windows you will need to run these commands using Git Bash, NOT Powershell / CMD


# -- Configuration --
# Set the local port to use for the port forwarding - connect to this port on your local machine to connect to the RDS
LOCAL_PORT = 6005
# For Parameter store URL Paths
STAGE := staging
# Set to AWSCLI Profile names
PROFILE := "housing-${STAGE}"

# -- Parameter Store paths --
# NOTE: no leading space inside the quotes - a name of " /bonuscalc-api/..."
# is not a valid SSM parameter name and the lookups would fail.
JUMP_BOX_INSTANCE_NAME_PATH:="platform-apis-jump-box-instance-name"
POSTGRES_HOST_PATH:="/bonuscalc-api/${STAGE}/postgres-hostname"
POSTGRES_PORT_PATH:="/bonuscalc-api/${STAGE}/postgres-port"
POSTGRES_USERNAME_PATH:="/bonuscalc-api/${STAGE}/postgres-username"
POSTGRES_PASSWORD_PATH:="/bonuscalc-api/${STAGE}/postgres-password"

# -- Parameters --
# Get parameters from parameter store for the profile used
INSTANCE_ID := $(shell aws ssm get-parameter --name ${JUMP_BOX_INSTANCE_NAME_PATH} --region "eu-west-2" --profile ${PROFILE} --query Parameter.Value --output text)
REMOTE_HOST := $(shell aws ssm get-parameter --name ${POSTGRES_HOST_PATH} --region "eu-west-2" --profile ${PROFILE} --query Parameter.Value --output text)
REMOTE_PORT := $(shell aws ssm get-parameter --name ${POSTGRES_PORT_PATH} --region "eu-west-2" --profile ${PROFILE} --query Parameter.Value --output text)
REMOTE_USERNAME := $(shell aws ssm get-parameter --name ${POSTGRES_USERNAME_PATH} --region "eu-west-2" --profile ${PROFILE} --query Parameter.Value --output text)
REMOTE_PASSWORD := $(shell aws ssm get-parameter --with-decryption --name ${POSTGRES_PASSWORD_PATH} --region "eu-west-2" --profile ${PROFILE} --query Parameter.Value --output text)

DATABASE_PARAMS = '{"host":["${REMOTE_HOST}"], "portNumber":["${REMOTE_PORT}"], "localPortNumber":["${LOCAL_PORT}"]}'

# -- Commands --

# Use this command to login to the AWS SSO service
# This is required to use the ssm commands
sso_login:
	if (aws sts get-caller-identity --profile ${PROFILE})
	then
	echo "Session still valid"
	else
	echo "Session expired, logging in"
	aws sso login --profile ${PROFILE}
	fi

# Use this command to connect to create a port forwarding session from localhost to the RDS instance via the jump-box
# This will allow connecting to the database using a GUI tool like pgAdmin, or with local scripts
port_forwarding_to_bonus_calc:
	echo USERNAME: ${REMOTE_USERNAME}
	echo PASSWORD: ${REMOTE_PASSWORD}
	aws ssm start-session \
		--target ${INSTANCE_ID} \
		--region eu-west-2 \
		--profile ${PROFILE} \
		--document-name AWS-StartPortForwardingSessionToRemoteHost \
		--parameters ${DATABASE_PARAMS};
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@ google>=3.0.0
google-api-python-client>=2.86.0
pyperclip~=1.8.2
python-dateutil~=2.8.2
pyodbc~=5.2.0
# pyodbc~=5.2.0
requests~=2.31.0
Faker~=22.0.0
elasticsearch~=7.10.1
pre-commit>=3.5.0
psycopg2-binary>=2.9.9
numpy<2
numpy>=2