From 5f226767ad6dc0af1914906d671511b6d646e818 Mon Sep 17 00:00:00 2001 From: vapor-forensics Date: Sat, 1 Mar 2025 21:41:53 +1000 Subject: [PATCH] dev --- apps/analysis/detection_rules/aws_rules.yaml | 8 +- .../analysis/detection_rules/azure_rules.yaml | 52 +++ apps/analysis/urls.py | 9 +- apps/analysis/views.py | 55 ++- apps/azure/admin.py | 6 + apps/azure/forms.py | 52 +++ apps/azure/migrations/0001_initial.py | 91 ++++ apps/azure/models.py | 123 ++++- apps/azure/tasks.py | 63 +++ apps/azure/urls.py | 41 ++ apps/azure/utils.py | 314 +++++++++++++ apps/azure/views.py | 441 +++++++++++++++++- apps/case/views.py | 10 +- .../0007_normalizedlog_azure_account.py | 20 + .../0008_alter_normalizedlog_aws_account.py | 20 + apps/data/models.py | 3 +- apps/data/views.py | 49 +- apps/gcp/forms.py | 52 +++ apps/gcp/migrations/0001_initial.py | 63 +++ apps/gcp/models.py | 69 ++- apps/gcp/tasks.py | 17 + apps/gcp/urls.py | 23 + apps/gcp/utils.py | 196 ++++++++ apps/gcp/views.py | 263 ++++++++++- requirements/dev-requirements.txt | 57 --- requirements/requirements.in | 21 +- requirements/requirements.txt | 180 ++++++- scope/urls.py | 5 +- static/images/cloud/azure.svg | 23 + static/images/cloud/gcp.svg | 14 + templates/analysis/case_detections.html | 28 ++ templates/azure/account_resources.html | 272 +++++++++++ templates/azure/connect_azure.html | 102 ++++ templates/azure/edit_account.html | 75 +++ templates/azure/get_logs.html | 126 +++++ templates/azure/logsource_details.html | 84 ++++ templates/azure/resource_details.html | 89 ++++ templates/case/case_detail.html | 93 +++- templates/case/connect_client.html | 35 +- templates/data/normalized_logs.html | 38 +- templates/gcp/account_resources.html | 270 +++++++++++ templates/gcp/connect_gcp.html | 111 +++++ templates/gcp/edit_account.html | 103 ++++ templates/gcp/logsource_details.html | 84 ++++ templates/gcp/resource_details.html | 86 ++++ 45 files changed, 3821 insertions(+), 115 deletions(-) create mode 100644 
apps/analysis/detection_rules/azure_rules.yaml create mode 100644 apps/azure/forms.py create mode 100644 apps/azure/migrations/0001_initial.py create mode 100644 apps/azure/tasks.py create mode 100644 apps/azure/urls.py create mode 100644 apps/azure/utils.py create mode 100644 apps/data/migrations/0007_normalizedlog_azure_account.py create mode 100644 apps/data/migrations/0008_alter_normalizedlog_aws_account.py create mode 100644 apps/gcp/forms.py create mode 100644 apps/gcp/migrations/0001_initial.py create mode 100644 apps/gcp/tasks.py create mode 100644 apps/gcp/urls.py create mode 100644 apps/gcp/utils.py create mode 100644 static/images/cloud/azure.svg create mode 100644 static/images/cloud/gcp.svg create mode 100644 templates/azure/account_resources.html create mode 100644 templates/azure/connect_azure.html create mode 100644 templates/azure/edit_account.html create mode 100644 templates/azure/get_logs.html create mode 100644 templates/azure/logsource_details.html create mode 100644 templates/azure/resource_details.html create mode 100644 templates/gcp/account_resources.html create mode 100644 templates/gcp/connect_gcp.html create mode 100644 templates/gcp/edit_account.html create mode 100644 templates/gcp/logsource_details.html create mode 100644 templates/gcp/resource_details.html diff --git a/apps/analysis/detection_rules/aws_rules.yaml b/apps/analysis/detection_rules/aws_rules.yaml index 412819f..8abead3 100644 --- a/apps/analysis/detection_rules/aws_rules.yaml +++ b/apps/analysis/detection_rules/aws_rules.yaml @@ -7,7 +7,7 @@ severity: "medium" event_source: "sts.amazonaws.com" event_name: "GetCallerIdentity" - auto_tags: ["suspicious", "reconnaissance"] + auto_tags: ["Suspicious", "Medium"] enabled: true - name: "Root Account Usage" @@ -17,7 +17,7 @@ severity: "high" event_source: "signin.amazonaws.com" additional_criteria: {"user_identity": "root"} - auto_tags: ["high-risk", "compliance-violation"] + auto_tags: ["High", "Suspicious"] enabled: true - 
name: "Security Group Modification" @@ -27,7 +27,7 @@ severity: "medium" event_source: "ec2.amazonaws.com" event_name: "AuthorizeSecurityGroupIngress" - auto_tags: ["security-group-change", "network-modification"] + auto_tags: ["Medium", "Informational"] enabled: true - name: "IAM Policy Changes" @@ -37,5 +37,5 @@ severity: "high" event_source: "iam.amazonaws.com" event_name: "PutRolePolicy" - auto_tags: ["iam-change", "privilege-escalation"] + auto_tags: ["High", "Suspicious"] enabled: true \ No newline at end of file diff --git a/apps/analysis/detection_rules/azure_rules.yaml b/apps/analysis/detection_rules/azure_rules.yaml new file mode 100644 index 0000000..d26b7f5 --- /dev/null +++ b/apps/analysis/detection_rules/azure_rules.yaml @@ -0,0 +1,52 @@ +# Azure Pre-built Detection Rules + +- name: "Azure Sign-in from Unknown Location" + description: "Detects sign-in attempts from unusual or unauthorized locations" + cloud: "azure" + detection_type: "login" + severity: "medium" + event_source: "Microsoft.Azure.ActiveDirectory" + event_name: "Sign-in activity" + additional_criteria: {"raw_data_contains": "unfamiliarFeatures"} + auto_tags: ["Suspicious", "Medium"] + enabled: true + +- name: "Azure Role Assignment Change" + description: "Detects changes to role assignments which could indicate privilege escalation" + cloud: "azure" + detection_type: "iam" + severity: "high" + event_source: "Microsoft.Authorization" + event_name: "Microsoft.Authorization/roleAssignments/write" + auto_tags: ["High", "Suspicious"] + enabled: true + +- name: "Network Security Group Modification" + description: "Detects modifications to NSGs which could indicate network security changes" + cloud: "azure" + detection_type: "network" + severity: "medium" + event_source: "Microsoft.Network" + event_name: "Microsoft.Network/networkSecurityGroups/write" + auto_tags: ["Medium", "Informational"] + enabled: true + +- name: "Key Vault Access Policy Change" + description: "Detects modifications to Key 
Vault access policies which could indicate unauthorized access attempts" + cloud: "azure" + detection_type: "keyvault" + severity: "high" + event_source: "Microsoft.KeyVault" + event_name: "Microsoft.KeyVault/vaults/accessPolicies/write" + auto_tags: ["High", "Suspicious"] + enabled: true + +- name: "Storage Account Configuration Change" + description: "Detects changes to storage account configurations which could indicate data exfiltration risks" + cloud: "azure" + detection_type: "storage" + severity: "medium" + event_source: "Microsoft.Storage" + event_name: "Microsoft.Storage/storageAccounts/write" + auto_tags: ["Medium", "Informational"] + enabled: true \ No newline at end of file diff --git a/apps/analysis/urls.py b/apps/analysis/urls.py index 8a9a023..868625b 100644 --- a/apps/analysis/urls.py +++ b/apps/analysis/urls.py @@ -5,14 +5,13 @@ urlpatterns = [ path('case//detections/', views.case_detections, name='case_detections'), + path('case//detections/run/', views.run_detections, name='run_detections'), + path('case//detections/results/', views.detection_results, name='detection_results'), path('case//detections/rules/', views.detection_list, name='detection_list'), path('case//detections/rules/create/', views.detection_create, name='detection_create'), path('case//detections/rules//edit/', views.detection_edit, name='detection_edit'), path('case//detections/rules//delete/', views.detection_delete, name='detection_delete'), - path('case//detections/run/', views.run_detections, name='run_detections'), path('case//detections/rules/load-prebuilt/', views.load_prebuilt_rules, name='load_prebuilt_rules'), - path('case//detection-result//tag/', - views.tag_detection_result, name='tag_detection_result'), - path('api/detection-result//tags/', - views.get_detection_result_tags, name='get_detection_result_tags'), + path('case//detections/result//tag/', views.tag_detection_result, name='tag_detection_result'), + path('detection-result//tags/', 
@login_required
def case_detections(request, case_id):
    """Main detections page: detection results for a case, grouped by rule.

    Supports an optional ``account`` query parameter of the form
    ``"<cloud>:<account id>"`` (e.g. ``"aws:123456789012"``) that narrows the
    results to matched logs from a single cloud account.
    """
    case = get_object_or_404(Case, id=case_id)

    # Optional account filter from the query string (user-controlled).
    account_filter = request.GET.get('account')

    # All detection results for the case. select_related avoids one query per
    # row when the template walks result.detection / result.matched_log, and
    # order_by makes the listing deterministic (newest first).
    results = DetectionResult.objects.filter(case=case).select_related(
        'detection', 'matched_log'
    ).order_by('-created_at')

    if account_filter:
        # partition() cannot raise on malformed input (unlike unpacking
        # split(':')); a value without a colon just leaves results unfiltered.
        account_type, sep, account_id = account_filter.partition(':')
        if sep and account_type == 'aws':
            results = results.filter(matched_log__aws_account__account_id=account_id)
        elif sep and account_type == 'azure':
            results = results.filter(matched_log__azure_account__subscription_id=account_id)

    # Build the account drop-down: every AWS/Azure account that produced at
    # least one matched log for this case.
    accounts = []
    aws_accounts = AWSAccount.objects.filter(
        normalized_logs__detectionresult__case=case
    ).distinct()
    azure_accounts = AzureAccount.objects.filter(
        normalized_logs__detectionresult__case=case
    ).distinct()

    for aws_acc in aws_accounts:
        accounts.append({
            'id': f'aws:{aws_acc.account_id}',
            'name': f'AWS Account: {aws_acc.account_id}',
            'type': 'aws'
        })

    for azure_acc in azure_accounts:
        accounts.append({
            'id': f'azure:{azure_acc.subscription_id}',
            'name': f'Azure Account: {azure_acc.subscription_id}',
            'type': 'azure'
        })

    # Group results by their detection rule for display.
    results_by_detection = {}
    for result in results:
        results_by_detection.setdefault(result.detection, []).append(result)

    context = {
        'case': case,
        'results_by_detection': results_by_detection,
        'detection_count': Detection.objects.count(),
        'total_results': results.count(),
        'available_tags': Tag.objects.all(),
        'accounts': accounts,
        'selected_account': account_filter,
    }
    return render(request, 'analysis/case_detections.html', context)
from django import forms
from .models import AzureAccount, AzureResource


class AzureAccountForm(forms.ModelForm):
    """Form for connecting an Azure subscription via service principal credentials."""

    class Meta:
        model = AzureAccount
        fields = ['subscription_id', 'tenant_id', 'client_id', 'client_secret']
        widgets = {
            'client_secret': forms.PasswordInput(),
            'subscription_id': forms.TextInput(attrs={
                'placeholder': 'e.g., 12345678-1234-5678-1234-567812345678',
                'class': 'form-control'
            }),
            'tenant_id': forms.TextInput(attrs={
                'placeholder': 'e.g., 87654321-4321-8765-4321-876543210987',
                'class': 'form-control'
            }),
            'client_id': forms.TextInput(attrs={
                'placeholder': 'e.g., 11111111-2222-3333-4444-555555555555',
                'class': 'form-control'
            })
        }
        labels = {
            'subscription_id': 'Subscription ID',
            'tenant_id': 'Directory (tenant) ID',
            'client_id': 'Application (client) ID',
            'client_secret': 'Client Secret'
        }
        help_texts = {
            'subscription_id': 'Found in Azure Portal under Subscriptions',
            'tenant_id': 'Found in Azure Active Directory → Overview',
            'client_id': 'Found in App Registration → Overview',
            'client_secret': 'Created in App Registration → Certificates & secrets'
        }


class FetchActivityLogsForm(forms.Form):
    """Form for choosing the date range of Azure Activity Logs to fetch."""

    start_date = forms.DateField(
        label="Start Date",
        widget=forms.DateInput(attrs={
            "type": "date",
            "class": "form-control"
        })
    )
    end_date = forms.DateField(
        label="End Date",
        widget=forms.DateInput(attrs={
            "type": "date",
            "class": "form-control"
        })
    )
2025-02-17 02:54 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('case', '0001_initial'), + ('data', '0006_alter_detectionresult_case_and_more'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='AzureAccount', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('subscription_id', models.CharField(max_length=50, unique=True)), + ('tenant_id', models.CharField(max_length=50)), + ('client_id', models.CharField(max_length=100)), + ('client_secret', models.CharField(max_length=100)), + ('added_at', models.DateTimeField(auto_now_add=True)), + ('validated', models.BooleanField(default=False)), + ('added_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='added_azure_accounts', to=settings.AUTH_USER_MODEL)), + ('case', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='azure_accounts', to='case.case')), + ], + ), + migrations.CreateModel( + name='AzureLogSource', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('service_name', models.CharField(max_length=100)), + ('log_name', models.CharField(max_length=255)), + ('log_details', models.JSONField(blank=True, null=True)), + ('status', models.CharField(max_length=50)), + ('location', models.CharField(blank=True, max_length=50, null=True)), + ('slug', models.SlugField(blank=True, max_length=255, unique=True)), + ('discovered_at', models.DateTimeField(auto_now_add=True)), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='log_sources', to='azure.azureaccount')), + ('case', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='azure_log_sources', 
to='case.case')), + ('tags', models.ManyToManyField(related_name='azure_log_source', to='data.tag')), + ], + ), + migrations.CreateModel( + name='AzureResource', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('resource_id', models.CharField(max_length=200)), + ('resource_type', models.CharField(max_length=100)), + ('resource_name', models.CharField(blank=True, max_length=200, null=True)), + ('resource_group', models.CharField(max_length=200)), + ('resource_details', models.JSONField(blank=True, null=True)), + ('location', models.CharField(blank=True, max_length=50, null=True)), + ('slug', models.SlugField(blank=True, max_length=255, unique=True)), + ('discovered_at', models.DateTimeField(auto_now_add=True)), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='resources', to='azure.azureaccount')), + ('case', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='azure_resources', to='case.case')), + ('tags', models.ManyToManyField(related_name='azure_resource', to='data.tag')), + ], + ), + migrations.CreateModel( + name='AzureIdentity', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('object_id', models.CharField(max_length=100)), + ('display_name', models.CharField(max_length=300)), + ('user_principal_name', models.CharField(blank=True, max_length=300, null=True)), + ('identity_type', models.CharField(max_length=50)), + ('mfa_enabled', models.BooleanField(default=False)), + ('created_datetime', models.DateTimeField(blank=True, null=True)), + ('last_sign_in', models.DateTimeField(blank=True, null=True)), + ('account_enabled', models.BooleanField(default=True)), + ('assigned_roles', models.JSONField(blank=True, null=True)), + ('identity_details', models.JSONField(blank=True, null=True)), + ('slug', models.SlugField(blank=True, max_length=255, unique=True)), + 
from django.db import models
from django.conf import settings
from apps.case.models import Case
from django.utils.text import slugify
from apps.data.models import Tag


def _unique_slug(model_cls, raw):
    """Slugify *raw* and make it unique across *model_cls*.

    Appends ``-1``, ``-2``, … until no row of *model_cls* has the slug.
    Shared by the three models below, which previously each carried an
    identical copy of this loop; the candidate sequence is unchanged.
    """
    base_slug = slugify(raw)
    candidate = base_slug
    suffix = 1
    while model_cls.objects.filter(slug=candidate).exists():
        candidate = f"{base_slug}-{suffix}"
        suffix += 1
    return candidate


class AzureAccount(models.Model):
    """An Azure subscription attached to a case via service principal credentials."""
    case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='azure_accounts')
    subscription_id = models.CharField(max_length=50, unique=True)
    tenant_id = models.CharField(max_length=50)
    client_id = models.CharField(max_length=100)  # Application (client) ID
    # NOTE(review): stored in clear text — consider encrypting at rest.
    client_secret = models.CharField(max_length=100)  # Client secret
    added_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, related_name='added_azure_accounts')
    added_at = models.DateTimeField(auto_now_add=True)
    validated = models.BooleanField(default=False)

    def __str__(self):
        return f"Azure Subscription {self.subscription_id} for Case {self.case.name}"


# Get an overview of all the resources in the subscription for analysis
class AzureResource(models.Model):
    account = models.ForeignKey('AzureAccount', on_delete=models.CASCADE, related_name='resources')
    case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='azure_resources')
    resource_id = models.CharField(max_length=200)  # Azure Resource ID
    resource_type = models.CharField(max_length=100)  # e.g., Microsoft.Compute/virtualMachines
    resource_name = models.CharField(max_length=200, blank=True, null=True)
    resource_group = models.CharField(max_length=200)
    resource_details = models.JSONField(blank=True, null=True)
    location = models.CharField(max_length=50, blank=True, null=True)  # Azure region
    slug = models.SlugField(max_length=255, unique=True, blank=True)
    discovered_at = models.DateTimeField(auto_now_add=True)

    # Tags
    tags = models.ManyToManyField(Tag, related_name='azure_resource')

    def save(self, *args, **kwargs):
        # Generate a unique slug on first save only.
        if not self.slug:
            self.slug = _unique_slug(AzureResource, f"{self.resource_type}-{self.resource_name}")
        super().save(*args, **kwargs)

    def __str__(self):
        return f"{self.resource_type} - {self.resource_name or self.resource_id}"


# Represents different Azure logging sources like Activity Logs and sign in logs
class AzureLogSource(models.Model):
    account = models.ForeignKey('AzureAccount', on_delete=models.CASCADE, related_name='log_sources')
    case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='azure_log_sources')
    service_name = models.CharField(max_length=100)  # e.g., ActivityLogs, DiagnosticSettings
    log_name = models.CharField(max_length=255)
    log_details = models.JSONField(blank=True, null=True)
    status = models.CharField(max_length=50)
    location = models.CharField(max_length=50, blank=True, null=True)
    slug = models.SlugField(max_length=255, unique=True, blank=True)
    discovered_at = models.DateTimeField(auto_now_add=True)

    # Tags
    tags = models.ManyToManyField(Tag, related_name='azure_log_source')

    def save(self, *args, **kwargs):
        # Generate a unique slug on first save only.
        if not self.slug:
            self.slug = _unique_slug(AzureLogSource, f"{self.service_name}-{self.log_name}")
        super().save(*args, **kwargs)

    def __str__(self):
        return f"{self.service_name} - {self.log_name}"


# Model to store Azure AD identities and their details
class AzureIdentity(models.Model):
    account = models.ForeignKey(AzureAccount, on_delete=models.CASCADE, related_name='identities')
    case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='azure_identities')

    # Identity details
    object_id = models.CharField(max_length=100)
    display_name = models.CharField(max_length=300)
    user_principal_name = models.CharField(max_length=300, null=True, blank=True)
    identity_type = models.CharField(max_length=50)  # user, service_principal, managed_identity

    # Security details
    mfa_enabled = models.BooleanField(default=False)
    created_datetime = models.DateTimeField(null=True, blank=True)
    last_sign_in = models.DateTimeField(null=True, blank=True)
    account_enabled = models.BooleanField(default=True)

    # Additional details stored as JSON
    assigned_roles = models.JSONField(blank=True, null=True)
    identity_details = models.JSONField(blank=True, null=True)

    slug = models.SlugField(max_length=255, unique=True, blank=True)
    discovered_at = models.DateTimeField(auto_now_add=True)

    # Tags
    tags = models.ManyToManyField(Tag, related_name='azure_identity')

    class Meta:
        unique_together = ('account', 'object_id')
        verbose_name_plural = "Azure identities"

    def save(self, *args, **kwargs):
        # Generate a unique slug on first save only.
        if not self.slug:
            self.slug = _unique_slug(AzureIdentity, f"{self.identity_type}-{self.display_name}")
        super().save(*args, **kwargs)

    def __str__(self):
        return f"{self.display_name} ({self.identity_type})"
from celery import shared_task
from .models import AzureAccount
from .utils import pull_azure_resources, discover_log_sources, fetch_and_normalize_activity_logs
import logging

# Single module-level logger. (A previous duplicate
# logging.getLogger('azure_tasks') handle was immediately overwritten
# and never used, so it has been removed.)
logger = logging.getLogger(__name__)


@shared_task
def pull_azure_resources_task(subscription_id):
    """Background task to pull Azure resources for a given subscription ID.

    Returns a human-readable status string; failures are reported through the
    return value (not raised) so the task result is always recorded.
    """
    try:
        logger.info(f"Starting discovery for Azure subscription ID: {subscription_id}")
        azure_account = AzureAccount.objects.get(subscription_id=subscription_id)

        # Pull resources and discover log sources
        pull_azure_resources(azure_account)
        discover_log_sources(azure_account)

        logger.info(f"Successfully pulled resources for Azure subscription ID: {subscription_id}")
        return f"Successfully pulled resources for Azure subscription {subscription_id}"
    except AzureAccount.DoesNotExist:
        logger.error(f"AzureAccount with subscription ID {subscription_id} does not exist.")
        return f"AzureAccount with subscription ID {subscription_id} does not exist."
    except Exception as e:
        logger.error(f"Error pulling Azure resources: {e}")
        return f"Error pulling resources: {str(e)}"


@shared_task
def fetch_normalize_activity_logs_task(subscription_id, start_date, end_date, case_id):
    """Background task to fetch and normalize Azure Activity Log entries.

    Re-raises on failure so Celery records the task as failed.
    """
    try:
        logger.info(f"Starting Activity Log fetch for Azure subscription ID: {subscription_id}")
        fetch_and_normalize_activity_logs(
            subscription_id=subscription_id,
            start_date=start_date,
            end_date=end_date,
            case_id=case_id
        )
        logger.info(f"Successfully fetched Activity Logs for Azure subscription ID: {subscription_id}")
    except Exception as e:
        logger.error(f"Error fetching Activity Logs for Azure subscription ID {subscription_id}: {e}")
        raise


@shared_task
def fetch_azure_identities_task(account_id):
    """Background task to fetch Azure AD identities.

    Note: This would need a corresponding utility function in utils.py.
    Currently only loads the account; a missing account is logged and swallowed.
    """
    try:
        logger.info(f"Starting identity fetch for Azure subscription ID: {account_id}")
        # Loaded for the future fetch_azure_identities(azure_account) call below.
        azure_account = AzureAccount.objects.get(subscription_id=account_id)
        # TODO: Implement fetch_azure_identities in utils.py
        # fetch_azure_identities(azure_account)
        logger.info(f"Successfully fetched identities for Azure subscription ID: {account_id}")
    except AzureAccount.DoesNotExist:
        logger.error(f"AzureAccount with subscription ID {account_id} does not exist.")
    except Exception as e:
        logger.error(f"Error fetching identities for Azure subscription ID {account_id}: {e}")
        raise
from django.urls import path
from django.views.generic import TemplateView  # kept from original import block

from . import views

app_name = "azure"

# NOTE(review): the path converters were stripped from the source during
# extraction (routes appeared as e.g. 'accounts//edit/'). They are
# reconstructed here from the view signatures in apps/azure/views.py
# (slug for detail views, subscription_id for account views, integer ids
# for tag management) — confirm against the templates that reverse these
# URL names.
urlpatterns = [
    # Basic account management
    path('<slug:slug>/connect/azure/', views.connect_azure, name='connect_azure'),
    path('accounts/<str:subscription_id>/edit/', views.edit_account, name='edit_account'),
    path('accounts/<str:subscription_id>/delete/', views.delete_account, name='delete_account'),

    # Resource management
    path('accounts/<str:subscription_id>/pull-resources/', views.pull_resources_view, name='pull_resources'),
    path('resource/<slug:slug>/details/', views.azure_resource_details, name='azure_resource_details'),
    path('accounts/<str:subscription_id>/account-resources/', views.account_resources, name='account_resources'),

    # Log sources and activity logs
    path('logsource/<slug:slug>/details/', views.azure_logsource_details, name='azure_logsource_details'),
    path('accounts/<str:subscription_id>/fetch-activity-logs/', views.trigger_activity_log_fetch, name='fetch_activity_logs'),
    path('accounts/<str:subscription_id>/logs/', views.normalized_logs_view, name='normalized_logs'),

    # Identity management
    path('identity/<slug:slug>/', views.azure_identity_details, name='azure_identity_details'),

    # Tag management for resources
    path('resources/<int:resource_id>/add-tag/', views.add_tag_to_resource, name='add_tag_to_resource'),
    path('resources/<int:resource_id>/edit-tag/<int:tag_id>/', views.edit_resource_tag, name='edit_resource_tag'),
    path('resources/<int:resource_id>/remove-tag/<int:tag_id>/', views.remove_tag_from_resource, name='remove_tag_from_resource'),

    # Tag management for identities
    path('identities/<int:identity_id>/add-tag/', views.add_tag_to_identity, name='add_tag_to_identity'),
    path('identities/<int:identity_id>/edit-tag/<int:tag_id>/', views.edit_identity_tag, name='edit_identity_tag'),
    path('identities/<int:identity_id>/remove-tag/<int:tag_id>/', views.remove_tag_from_identity, name='remove_tag_from_identity'),

    # Tag management for log sources
    path('logsources/<int:logsource_id>/add-tag/', views.add_tag_to_logsource, name='add_tag_to_logsource'),
    path('logsources/<int:logsource_id>/edit-tag/<int:tag_id>/', views.edit_logsource_tag, name='edit_logsource_tag'),
    path('logsources/<int:logsource_id>/remove-tag/<int:tag_id>/', views.remove_tag_from_logsource, name='remove_tag_from_logsource'),
]
from azure.identity import ClientSecretCredential
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.monitor import MonitorManagementClient
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.graphrbac import GraphRbacManagementClient
from msrestazure.azure_active_directory import MSIAuthentication
from .models import AzureResource, AzureLogSource, AzureAccount, AzureIdentity
from apps.data.models import NormalizedLog
from apps.case.models import Case
from datetime import datetime, timedelta, timezone as dt_timezone
from django.utils import timezone
from django.utils.timezone import make_aware
from django.db import transaction
import logging
import json
import ipaddress

logger = logging.getLogger(__name__)

# Azure Activity Log timestamp layouts: with and without fractional seconds.
_AZURE_TS_FORMATS = ('%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ')


def parse_azure_datetime(datetime_str):
    """Parse an Azure ISO-8601 UTC timestamp string into an aware datetime.

    Returns None for missing values, Azure placeholder strings ('N/A',
    'not_supported'), and strings matching neither supported layout.
    """
    if not datetime_str or datetime_str in ('N/A', 'not_supported'):
        return None
    for fmt in _AZURE_TS_FORMATS:
        try:
            dt = datetime.strptime(datetime_str, fmt)
        except ValueError:
            continue
        # BUG FIX: the original passed django.utils.timezone.utc to
        # make_aware, but that attribute was removed in Django 5.0 (this
        # project is on Django 5.1 per its migration headers), so every
        # parse raised AttributeError. Use the stdlib UTC tzinfo instead.
        return make_aware(dt, dt_timezone.utc)
    logger.debug(f"Could not parse datetime: {datetime_str}")
    return None


def validate_azure_credentials(tenant_id, client_id, client_secret, subscription_id):
    """Check that a service principal can list resources in a subscription.

    Returns:
        (True, None) on success, otherwise (False, human-readable error).
    """
    try:
        logger.info(f"Attempting to validate Azure credentials for subscription {subscription_id}")
        credentials = ClientSecretCredential(
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret
        )
    except Exception as cred_error:
        error_message = str(cred_error)
        logger.error(f"Azure credential creation error: {error_message}", exc_info=True)
        return False, f"Credential error: {error_message}"

    try:
        logger.info("Credentials created, attempting to list resources")
        client = ResourceManagementClient(credentials, subscription_id)
        logger.info("Created ResourceManagementClient, about to list resources")
        resources = client.resources.list(top=1)
        logger.info("Got resources iterator")
        try:
            # The SDK list call is lazy — API/auth errors only surface when
            # the iterator is advanced, so force the first fetch here.
            first_resource = next(resources)
            logger.info(f"Successfully retrieved first resource: {first_resource.name if first_resource else 'None'}")
        except StopIteration:
            logger.info("No resources found in subscription, but access is valid")
        logger.info("Successfully validated Azure credentials")
        return True, None
    except Exception as resource_error:
        error_message = str(resource_error)
        logger.error(f"Full Azure API error: {error_message}", exc_info=True)
        # Map the common Azure error codes to actionable messages.
        if "AuthenticationFailed" in error_message:
            return False, "Authentication failed. Please check your credentials."
        if "SubscriptionNotFound" in error_message:
            return False, "Subscription not found. Please check your Subscription ID."
        if "InvalidAuthenticationTokenTenant" in error_message:
            return False, "Invalid tenant. Please check your Tenant ID."
        if "AuthorizationFailed" in error_message:
            return False, "Authorization failed. The app registration needs Reader role at subscription level."
        logger.error(f"Unhandled Azure validation error: {error_message}", exc_info=True)
        return False, f"Resource access error: {error_message}"


def serialize_resource_details(resource):
    """Recursively convert datetimes to ISO strings so the value is JSON-safe."""
    if isinstance(resource, dict):
        return {key: serialize_resource_details(value) for key, value in resource.items()}
    if isinstance(resource, list):
        return [serialize_resource_details(item) for item in resource]
    if isinstance(resource, datetime):
        return resource.isoformat()
    return resource


def pull_azure_resources(azure_account):
    """Discover all resources in the subscription and upsert AzureResource rows."""
    logger.info(f"Pulling Azure resources for subscription: {azure_account.subscription_id}")

    credentials = ClientSecretCredential(
        tenant_id=azure_account.tenant_id,
        client_id=azure_account.client_id,
        client_secret=azure_account.client_secret
    )
    resource_client = ResourceManagementClient(credentials, azure_account.subscription_id)

    try:
        for resource in resource_client.resources.list():
            try:
                # Resource IDs look like
                # /subscriptions/<sub>/resourceGroups/<rg>/providers/...,
                # so index 4 is the resource group name.
                resource_group = resource.id.split('/')[4] if resource.id else None

                AzureResource.objects.update_or_create(
                    account=azure_account,
                    case=azure_account.case,
                    resource_id=resource.id,
                    defaults={
                        'resource_type': resource.type,
                        'resource_name': resource.name,
                        'resource_group': resource_group,
                        'location': resource.location,
                        'resource_details': serialize_resource_details(resource.as_dict())
                    }
                )
                logger.info(f"Saved resource: {resource.name} ({resource.type})")
            except Exception as e:
                # Keep going: one malformed resource must not abort the pull.
                logger.error(f"Error processing resource {resource.name}: {e}")
                continue
    except Exception as e:
        logger.error(f"Error pulling Azure resources: {e}")


def discover_log_sources(azure_account):
    """Record available Azure log sources (Activity Log + diagnostic settings)."""
    logger.info(f"Discovering log sources for subscription: {azure_account.subscription_id}")

    credentials = ClientSecretCredential(
        tenant_id=azure_account.tenant_id,
        client_id=azure_account.client_id,
        client_secret=azure_account.client_secret
    )
    monitor_client = MonitorManagementClient(credentials, azure_account.subscription_id)

    try:
        # The subscription-level Activity Log is always available.
        AzureLogSource.objects.update_or_create(
            account=azure_account,
            case=azure_account.case,
            service_name='ActivityLog',
            log_name='Azure Activity Log',
            defaults={
                'status': 'Enabled',
                'log_details': {'type': 'platform'}
            }
        )

        # Per-resource diagnostic settings (requires a prior resource pull).
        for resource in AzureResource.objects.filter(account=azure_account):
            try:
                diagnostic_settings = monitor_client.diagnostic_settings.list(resource.resource_id)
                for setting in diagnostic_settings:
                    AzureLogSource.objects.update_or_create(
                        account=azure_account,
                        case=azure_account.case,
                        service_name='DiagnosticSettings',
                        log_name=f"{resource.resource_name}-{setting.name}",
                        defaults={
                            'status': 'Enabled' if setting.logs else 'Disabled',
                            'log_details': serialize_resource_details(setting.as_dict()),
                            'location': resource.location
                        }
                    )
            except Exception as e:
                # Many resource types simply don't support diagnostic
                # settings; that's expected, so log at debug only.
                logger.debug(f"Error checking diagnostic settings for {resource.resource_name}: {e}")
                continue
    except Exception as e:
        logger.error(f"Error discovering log sources: {e}")


def normalize_azure_event(raw_event, case, azure_account):
    """Map one raw Activity Log event dict onto NormalizedLog field values.

    Returns a kwargs dict suitable for NormalizedLog(**kwargs), or None if
    the event cannot be normalized.
    """
    try:
        event_time = parse_azure_datetime(raw_event.get('eventTimestamp'))

        # ROBUSTNESS FIX: 'claims', 'operationName' and 'resourceType' may be
        # present but explicitly null in the payload; the original's
        # .get(key, {}) did not coalesce None, so such events crashed into
        # the broad except below and were silently dropped.
        claims = raw_event.get('claims') or {}
        operation = raw_event.get('operationName') or {}

        caller = raw_event.get('caller') or 'Unknown'
        if claims:
            caller = claims.get('name', caller)

        operation_name = operation.get('value', 'Unknown')
        operation_type = operation.get('localizedValue', 'Unknown')

        # Keep only syntactically valid IP addresses.
        ip_address = claims.get('ipaddr')
        if ip_address:
            try:
                ipaddress.ip_address(ip_address)
            except ValueError:
                ip_address = None

        resources = []
        if raw_event.get('resourceId'):
            resources.append({
                'resourceId': raw_event['resourceId'],
                'resourceType': (raw_event.get('resourceType') or {}).get('value'),
                'resourceGroup': raw_event.get('resourceGroup')
            })

        return {
            'case': case,
            'azure_account': azure_account,
            'event_source': 'azure',
            'event_id': raw_event.get('eventDataId'),
            'event_time': event_time,
            'event_name': operation_name,
            'event_type': operation_type,
            'user_identity': caller,
            'region': raw_event.get('resourceLocation'),
            'ip_address': ip_address,
            'user_agent': claims.get('userAgent'),
            'resources': json.dumps(resources),
            'raw_data': json.dumps(raw_event)
        }
    except Exception as e:
        logger.error(f"Error normalizing Azure event: {e}")
        return None


def fetch_and_normalize_activity_logs(subscription_id, start_date, end_date, case_id):
    """Fetch Activity Log entries for a date window and bulk-insert them.

    Args:
        subscription_id: Azure subscription ID of an existing AzureAccount.
        start_date / end_date: datetimes or 'YYYY-MM-DD' strings.
        case_id: primary key of the owning Case.

    NOTE(review): inserts are append-only — re-running the same window will
    create duplicate NormalizedLog rows (no dedupe on event_id); confirm
    whether that is acceptable upstream.
    """
    try:
        azure_account = AzureAccount.objects.get(subscription_id=subscription_id)
        case = Case.objects.get(id=case_id)
    except (AzureAccount.DoesNotExist, Case.DoesNotExist) as e:
        logger.error(f"Error fetching account or case: {e}")
        return

    credentials = ClientSecretCredential(
        tenant_id=azure_account.tenant_id,
        client_id=azure_account.client_id,
        client_secret=azure_account.client_secret
    )
    monitor_client = MonitorManagementClient(credentials, azure_account.subscription_id)

    try:
        # Accept either date strings (from Celery serialization) or datetimes.
        if isinstance(start_date, str):
            start_date = datetime.strptime(start_date, '%Y-%m-%d')
        if isinstance(end_date, str):
            end_date = datetime.strptime(end_date, '%Y-%m-%d')

        filter_string = f"eventTimestamp ge '{start_date.isoformat()}' and eventTimestamp le '{end_date.isoformat()}'"

        normalized_logs = []
        batch_size = 1000  # flush in batches to bound memory

        for activity_log in monitor_client.activity_logs.list(filter=filter_string):
            try:
                normalized_data = normalize_azure_event(activity_log.as_dict(), case, azure_account)
                if normalized_data:
                    normalized_logs.append(NormalizedLog(**normalized_data))

                if len(normalized_logs) >= batch_size:
                    with transaction.atomic():
                        NormalizedLog.objects.bulk_create(normalized_logs)
                    normalized_logs = []
            except Exception as e:
                logger.error(f"Error processing activity log entry: {e}")
                continue

        # Flush the final partial batch.
        if normalized_logs:
            with transaction.atomic():
                NormalizedLog.objects.bulk_create(normalized_logs)
    except Exception as e:
        logger.error(f"Error fetching activity logs: {e}")
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils import timezone
from django.db.models import Count
from apps.case.models import Case
from apps.data.models import NormalizedLog, Tag
from .models import AzureAccount, AzureResource, AzureLogSource, AzureIdentity
from .forms import AzureAccountForm
from .utils import validate_azure_credentials
from .tasks import pull_azure_resources_task, fetch_normalize_activity_logs_task
from datetime import datetime, timedelta
import logging

logger = logging.getLogger(__name__)


@login_required
def connect_azure(request, slug):
    """Connect an Azure account to a case, validating credentials on save."""
    case = get_object_or_404(Case, slug=slug)

    if request.method == "POST":
        form = AzureAccountForm(request.POST)
        if form.is_valid():
            azure_account = form.save(commit=False)
            azure_account.case = case
            azure_account.added_by = request.user

            # The account is saved either way; `validated` records whether
            # the supplied service-principal credentials actually work.
            is_valid, error_message = validate_azure_credentials(
                tenant_id=azure_account.tenant_id,
                client_id=azure_account.client_id,
                client_secret=azure_account.client_secret,
                subscription_id=azure_account.subscription_id
            )
            azure_account.validated = is_valid
            azure_account.save()

            if is_valid:
                messages.success(request, "Azure account connected successfully!")
            else:
                messages.error(request, f"Azure account saved, but validation failed: {error_message}")

            return redirect('case:case_detail', slug=case.slug)
    else:
        form = AzureAccountForm()

    return render(request, 'azure/connect_azure.html', {'form': form, 'case': case})


@login_required
def edit_account(request, subscription_id):
    """Edit an existing Azure account and re-validate its credentials."""
    account = get_object_or_404(AzureAccount, subscription_id=subscription_id)
    logger.info(f"Editing Azure account with subscription ID: {subscription_id}")

    if request.method == "POST":
        form = AzureAccountForm(request.POST, instance=account)
        if form.is_valid():
            azure_account = form.save(commit=False)

            # Log identifiers only; never the client secret.
            logger.info(f"Validating credentials for subscription: {azure_account.subscription_id}")
            logger.info(f"Using tenant_id: {azure_account.tenant_id}")
            logger.info(f"Using client_id: {azure_account.client_id}")
            logger.info("Client secret provided: [MASKED]")

            is_valid, error_message = validate_azure_credentials(
                tenant_id=azure_account.tenant_id,
                client_id=azure_account.client_id,
                client_secret=azure_account.client_secret,
                subscription_id=azure_account.subscription_id
            )
            if is_valid:
                logger.info("Azure credential validation successful")
            else:
                logger.error(f"Azure credential validation failed: {error_message}")

            azure_account.validated = is_valid
            azure_account.save()

            if is_valid:
                messages.success(request, "Azure account updated and credentials validated successfully!")
            else:
                messages.error(request, f"Azure account updated, but validation failed: {error_message}")

            return redirect('case:case_detail', slug=account.case.slug)
    else:
        form = AzureAccountForm(instance=account)

    return render(request, 'azure/edit_account.html', {'form': form, 'account': account})


@login_required
def delete_account(request, subscription_id):
    """Delete an Azure account and return to its case.

    NOTE(review): this deletes on GET with no confirmation or CSRF
    protection — consider requiring POST, matching the tag views.
    """
    account = get_object_or_404(AzureAccount, subscription_id=subscription_id)
    slug = account.case.slug
    account.delete()
    return redirect('case:case_detail', slug=slug)


@login_required
def pull_resources_view(request, subscription_id):
    """Queue the background task that pulls Azure resources for an account."""
    azure_account = get_object_or_404(AzureAccount, subscription_id=subscription_id)

    if not azure_account.validated:
        messages.error(request, "Cannot pull resources because the Azure account credentials are not validated.")
        return redirect('case:case_detail', slug=azure_account.case.slug)

    pull_azure_resources_task.delay(subscription_id)
    messages.info(request, "Resource pulling has started. Refresh the page after a few minutes to see the results.")

    return redirect('azure:account_resources', subscription_id=subscription_id)


@login_required
def account_resources(request, subscription_id):
    """Display Azure resources and log sources for an account, grouped by type."""
    azure_account = get_object_or_404(AzureAccount, subscription_id=subscription_id)
    case = azure_account.case

    resources = AzureResource.objects.filter(account=azure_account).order_by('resource_type', 'resource_name')
    grouped_resources = {}
    for resource in resources:
        grouped_resources.setdefault(resource.resource_type, []).append(resource)

    log_sources = AzureLogSource.objects.filter(account=azure_account).order_by('service_name', 'log_name')
    grouped_log_sources = {}
    for log_source in log_sources:
        grouped_log_sources.setdefault(log_source.service_name, []).append(log_source)

    error_messages = []
    if not resources.exists():
        error_messages.append("No Azure resources found for this account.")
    if not log_sources.exists():
        error_messages.append("No Azure log sources found for this account.")

    context = {
        'azure_account': azure_account,
        'case': case,
        'grouped_resources': grouped_resources,
        'grouped_log_sources': grouped_log_sources,
        'error_messages': error_messages,
        'all_tags': Tag.objects.all(),
    }
    return render(request, 'azure/account_resources.html', context)


@login_required
def azure_resource_details(request, slug):
    """Display detailed information for a specific Azure resource."""
    resource = get_object_or_404(AzureResource, slug=slug)
    account = resource.account
    return render(request, 'azure/resource_details.html', {
        'resource': resource,
        'account': account,
        'case': account.case
    })


@login_required
def azure_logsource_details(request, slug):
    """Display detailed information for a specific Azure log source."""
    log_source = get_object_or_404(AzureLogSource, slug=slug)
    account = log_source.account
    return render(request, 'azure/logsource_details.html', {
        'log_source': log_source,
        'account': account,
        'case': account.case,
        'azure_account': account
    })


@login_required
def trigger_activity_log_fetch(request, subscription_id):
    """Queue the background task that fetches Azure Activity Logs."""
    azure_account = get_object_or_404(AzureAccount, subscription_id=subscription_id)
    logger.info(f"Triggering Activity Log fetch for Azure subscription {subscription_id}")

    # Azure retains Activity Logs for 90 days; request 89 to stay inside the
    # window regardless of clock skew.
    end_date = timezone.now()
    start_date = end_date - timedelta(days=89)

    fetch_normalize_activity_logs_task.delay(
        subscription_id,
        start_date.strftime('%Y-%m-%d'),
        end_date.strftime('%Y-%m-%d'),
        azure_account.case.id
    )

    messages.info(request,
        "Azure Activity Log history is being fetched. Note: Azure only retains activity logs for 90 days."
    )
    logger.info(f"Task queued for Azure subscription {subscription_id}")

    return redirect("azure:normalized_logs", subscription_id=azure_account.subscription_id)


@login_required
def normalized_logs_view(request, subscription_id):
    """Display normalized Azure logs for an account with date filtering and
    top-10 aggregations (users, IPs, event names)."""
    azure_account = get_object_or_404(AzureAccount, subscription_id=subscription_id)

    # Default window: the last 7 days.
    start_date = request.GET.get('start_date') or (timezone.now() - timedelta(days=7)).strftime('%Y-%m-%d')
    end_date = request.GET.get('end_date') or timezone.now().strftime('%Y-%m-%d')

    # Work with naive datetimes first, make them aware once at the end.
    start_datetime = datetime.strptime(start_date, '%Y-%m-%d')
    end_datetime = datetime.strptime(end_date, '%Y-%m-%d') + timedelta(days=1)

    # Clamp to Azure's 90-day Activity Log retention window.
    ninety_days_ago = timezone.now().replace(tzinfo=None) - timedelta(days=90)
    if start_datetime < ninety_days_ago:
        messages.warning(request, "Azure only retains activity logs for 90 days. Adjusting start date.")
        start_datetime = ninety_days_ago
        start_date = start_datetime.strftime('%Y-%m-%d')

    start_datetime = timezone.make_aware(start_datetime)
    end_datetime = timezone.make_aware(end_datetime)

    logs = NormalizedLog.objects.filter(
        case=azure_account.case,
        event_source='azure',
        event_time__gte=start_datetime,
        event_time__lt=end_datetime,
        azure_account=azure_account
    ).order_by('-event_time')

    top_users = logs.values('user_identity').annotate(
        count=Count('user_identity')).order_by('-count')[:10]
    top_ips = logs.values('ip_address').annotate(
        count=Count('ip_address')).order_by('-count')[:10]
    top_events = logs.values('event_name').annotate(
        count=Count('event_name')).order_by('-count')[:10]

    context = {
        "azure_account": azure_account,
        "logs": logs,
        "top_users": top_users,
        "top_ips": top_ips,
        "top_events": top_events,
        "start_date": start_date,
        "end_date": end_date,
    }
    return render(request, "azure/get_logs.html", context)


@login_required
def azure_identity_details(request, slug):
    """Display detailed information for a specific Azure identity."""
    identity = get_object_or_404(AzureIdentity, slug=slug)
    account = identity.account
    return render(request, 'azure/identity_details.html', {
        'identity': identity,
        'account': account,
        'case': account.case,
        'azure_account': account
    })


# ---------------------------------------------------------------------------
# Tag management. The nine public views below share three generic helpers.
# BUG FIX: in the originals, when the object lookup failed (or the request
# was not POST) the trailing redirect referenced an unbound local
# (`resource.account...`), raising UnboundLocalError → HTTP 500. Resolving
# the parent object up front turns a missing object into a clean 404 and
# guarantees the redirect target always exists.
# ---------------------------------------------------------------------------

def _tag_redirect(request, account, use_referer=True):
    """Redirect back to the referring page when known, else to the account page."""
    if use_referer:
        referer = request.META.get('HTTP_REFERER', '')
        if referer:
            return redirect(referer)
    return redirect('azure:account_resources', subscription_id=account.subscription_id)


def _add_tag(request, model, object_id):
    """Attach an existing Tag (POST 'tag_id') to a taggable object."""
    obj = get_object_or_404(model, id=object_id)
    if request.method == 'POST':
        try:
            tag = Tag.objects.get(id=request.POST.get('tag_id'))
            obj.tags.add(tag)
            messages.success(request, f'Tag "{tag.name}" added successfully.')
        except Tag.DoesNotExist:
            messages.error(request, 'Error adding tag.')
    # The original add views always went straight to the account page.
    return _tag_redirect(request, obj.account, use_referer=False)


def _edit_tag(request, model, object_id, tag_id):
    """Swap tag `tag_id` for POST 'new_tag_id' on a taggable object."""
    obj = get_object_or_404(model, id=object_id)
    if request.method == 'POST':
        try:
            old_tag = Tag.objects.get(id=tag_id)
            new_tag = Tag.objects.get(id=request.POST.get('new_tag_id'))
            obj.tags.remove(old_tag)
            obj.tags.add(new_tag)
            messages.success(request, f'Tag updated from "{old_tag.name}" to "{new_tag.name}"')
        except Tag.DoesNotExist:
            messages.error(request, 'Error updating tag.')
    return _tag_redirect(request, obj.account)


def _remove_tag(request, model, object_id, tag_id):
    """Detach tag `tag_id` from a taggable object."""
    obj = get_object_or_404(model, id=object_id)
    if request.method == 'POST':
        try:
            tag = Tag.objects.get(id=tag_id)
            obj.tags.remove(tag)
            messages.success(request, f'Tag "{tag.name}" removed successfully.')
        except Tag.DoesNotExist:
            messages.error(request, 'Error removing tag.')
    return _tag_redirect(request, obj.account)


@login_required
def add_tag_to_resource(request, resource_id):
    return _add_tag(request, AzureResource, resource_id)


@login_required
def edit_resource_tag(request, resource_id, tag_id):
    return _edit_tag(request, AzureResource, resource_id, tag_id)


@login_required
def remove_tag_from_resource(request, resource_id, tag_id):
    return _remove_tag(request, AzureResource, resource_id, tag_id)


@login_required
def add_tag_to_logsource(request, logsource_id):
    return _add_tag(request, AzureLogSource, logsource_id)


@login_required
def edit_logsource_tag(request, logsource_id, tag_id):
    return _edit_tag(request, AzureLogSource, logsource_id, tag_id)


@login_required
def remove_tag_from_logsource(request, logsource_id, tag_id):
    return _remove_tag(request, AzureLogSource, logsource_id, tag_id)


@login_required
def add_tag_to_identity(request, identity_id):
    return _add_tag(request, AzureIdentity, identity_id)


@login_required
def edit_identity_tag(request, identity_id, tag_id):
    return _edit_tag(request, AzureIdentity, identity_id, tag_id)


@login_required
def remove_tag_from_identity(request, identity_id, tag_id):
    return _remove_tag(request, AzureIdentity, identity_id, tag_id)
AzureAccount.objects.filter(case=case) + gcp_accounts = GCPAccount.objects.filter(case=case) # Add GCP and Azure placeholders gcp_placeholder = True @@ -35,8 +39,8 @@ def case_detail(request, slug): return render(request, "case/case_detail.html", { "case": case, "aws_accounts": aws_accounts, - "gcp_placeholder": gcp_placeholder, - "azure_placeholder": azure_placeholder, + "gcp_accounts": gcp_accounts, + "azure_accounts": azure_accounts, }) # this is used to edit the details of a case diff --git a/apps/data/migrations/0007_normalizedlog_azure_account.py b/apps/data/migrations/0007_normalizedlog_azure_account.py new file mode 100644 index 0000000..47a3e3c --- /dev/null +++ b/apps/data/migrations/0007_normalizedlog_azure_account.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.3 on 2025-02-18 05:36 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('azure', '0001_initial'), + ('data', '0006_alter_detectionresult_case_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='normalizedlog', + name='azure_account', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='normalized_logs', to='azure.azureaccount'), + ), + ] diff --git a/apps/data/migrations/0008_alter_normalizedlog_aws_account.py b/apps/data/migrations/0008_alter_normalizedlog_aws_account.py new file mode 100644 index 0000000..85fd7ef --- /dev/null +++ b/apps/data/migrations/0008_alter_normalizedlog_aws_account.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.3 on 2025-02-18 05:53 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('aws', '0002_awscredential_tags_awslogsource_tags_and_more'), + ('data', '0007_normalizedlog_azure_account'), + ] + + operations = [ + migrations.AlterField( + model_name='normalizedlog', + name='aws_account', + 
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='normalized_logs', to='aws.awsaccount'), + ), + ] diff --git a/apps/data/models.py b/apps/data/models.py index 3ccddbd..e3434d8 100644 --- a/apps/data/models.py +++ b/apps/data/models.py @@ -47,7 +47,8 @@ class NormalizedLog(models.Model): raw_data = models.TextField() # Serialized JSON as text # Use string reference to break circular import - aws_account = models.ForeignKey('aws.AWSAccount', on_delete=models.CASCADE, related_name='normalized_logs') + aws_account = models.ForeignKey('aws.AWSAccount', on_delete=models.CASCADE, null=True, blank=True, related_name='normalized_logs') + azure_account = models.ForeignKey('azure.AzureAccount', on_delete=models.CASCADE, null=True, blank=True, related_name='normalized_logs') tags = models.ManyToManyField(Tag, related_name='normalized_logs') # Utility diff --git a/apps/data/views.py b/apps/data/views.py index 74659fb..fae7c4f 100644 --- a/apps/data/views.py +++ b/apps/data/views.py @@ -6,22 +6,52 @@ from datetime import datetime from django.contrib import messages from apps.aws.models import AWSAccount +from apps.azure.models import AzureAccount @login_required def NormalizedLogListView(request): - # Get account_id from query params if it exists - account_id = request.GET.get('account_id') - aws_account = None + # Get account filter from query params + account_filter = request.GET.get('account') case = None + # Start with all logs queryset = NormalizedLog.objects.all().order_by('-event_time') - # Filter by account if specified - if account_id: - aws_account = get_object_or_404(AWSAccount, account_id=account_id) - queryset = queryset.filter(aws_account=aws_account) - case = aws_account.case + # Parse account filter (format: "aws:account_id" or "azure:subscription_id") + if account_filter: + account_type, account_id = account_filter.split(':') + if account_type == 'aws': + aws_account = get_object_or_404(AWSAccount, 
account_id=account_id) + queryset = queryset.filter(aws_account=aws_account) + case = aws_account.case + elif account_type == 'azure': + azure_account = get_object_or_404(AzureAccount, subscription_id=account_id) + queryset = queryset.filter(azure_account=azure_account) + case = azure_account.case + # Get unique accounts that have logs + accounts = [] + aws_accounts = AWSAccount.objects.filter( + normalized_logs__isnull=False + ).distinct() + azure_accounts = AzureAccount.objects.filter( + normalized_logs__isnull=False + ).distinct() + + for aws_acc in aws_accounts: + accounts.append({ + 'id': f'aws:{aws_acc.account_id}', + 'name': f'AWS Account: {aws_acc.account_id}', + 'type': 'aws' + }) + + for azure_acc in azure_accounts: + accounts.append({ + 'id': f'azure:{azure_acc.subscription_id}', + 'name': f'Azure Account: {azure_acc.subscription_id}', + 'type': 'azure' + }) + search_query = request.GET.get('search', '') field_filter = request.GET.get('field', '') field_value = request.GET.get('field_value', '') @@ -89,7 +119,8 @@ def NormalizedLogListView(request): 'end_date': end_date, 'is_paginated': page_obj.has_other_pages(), 'all_tags': all_tags, - 'aws_account': aws_account, + 'accounts': accounts, + 'selected_account': account_filter, 'case': case } diff --git a/apps/gcp/forms.py b/apps/gcp/forms.py new file mode 100644 index 0000000..11460ab --- /dev/null +++ b/apps/gcp/forms.py @@ -0,0 +1,52 @@ +from django import forms +from .models import GCPAccount +import json + +class GCPAccountForm(forms.ModelForm): + service_account_key = forms.FileField( + help_text="Upload your service account key JSON file", + widget=forms.FileInput(attrs={'class': 'form-control'}) + ) + + class Meta: + model = GCPAccount + fields = ['project_id'] + widgets = { + 'project_id': forms.TextInput(attrs={ + 'placeholder': 'e.g., my-project-123456', + 'class': 'form-control' + }) + } + labels = { + 'project_id': 'Project ID', + } + help_texts = { + 'project_id': 'Found in GCP Console under 
Project Info', + } + + def clean_service_account_key(self): + file = self.cleaned_data['service_account_key'] + try: + content = file.read().decode('utf-8') + json_content = json.loads(content) + + required_fields = ['type', 'project_id', 'private_key_id', 'private_key', 'client_email'] + for field in required_fields: + if field not in json_content: + raise forms.ValidationError(f"Service account key is missing required field: {field}") + + if json_content['type'] != 'service_account': + raise forms.ValidationError("Invalid service account key format") + + return json_content + except json.JSONDecodeError: + raise forms.ValidationError("Invalid JSON format in service account key file") + except Exception as e: + raise forms.ValidationError(f"Error processing service account key: {str(e)}") + + def save(self, commit=True): + instance = super().save(commit=False) + instance.service_account_info = self.cleaned_data['service_account_key'] + if commit: + instance.save() + return instance diff --git a/apps/gcp/migrations/0001_initial.py b/apps/gcp/migrations/0001_initial.py new file mode 100644 index 0000000..6bebe3c --- /dev/null +++ b/apps/gcp/migrations/0001_initial.py @@ -0,0 +1,63 @@ +# Generated by Django 5.1.3 on 2025-02-26 00:03 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('case', '0001_initial'), + ('data', '0008_alter_normalizedlog_aws_account'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='GCPAccount', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('project_id', models.CharField(max_length=100, unique=True)), + ('service_account_info', models.JSONField()), + ('added_at', models.DateTimeField(auto_now_add=True)), + ('validated', models.BooleanField(default=False)), + 
('added_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='added_gcp_accounts', to=settings.AUTH_USER_MODEL)), + ('case', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gcp_accounts', to='case.case')), + ], + ), + migrations.CreateModel( + name='GCPLogSource', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('service_name', models.CharField(max_length=100)), + ('log_name', models.CharField(max_length=255)), + ('log_details', models.JSONField(blank=True, null=True)), + ('status', models.CharField(max_length=50)), + ('location', models.CharField(blank=True, max_length=50, null=True)), + ('slug', models.SlugField(blank=True, max_length=255, unique=True)), + ('discovered_at', models.DateTimeField(auto_now_add=True)), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='log_sources', to='gcp.gcpaccount')), + ('case', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gcp_log_sources', to='case.case')), + ('tags', models.ManyToManyField(related_name='gcp_log_source', to='data.tag')), + ], + ), + migrations.CreateModel( + name='GCPResource', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('resource_id', models.CharField(max_length=200)), + ('resource_type', models.CharField(max_length=100)), + ('resource_name', models.CharField(max_length=200)), + ('location', models.CharField(blank=True, max_length=50, null=True)), + ('resource_details', models.JSONField(blank=True, null=True)), + ('slug', models.SlugField(blank=True, max_length=255, unique=True)), + ('discovered_at', models.DateTimeField(auto_now_add=True)), + ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='resources', to='gcp.gcpaccount')), + ('case', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gcp_resources', to='case.case')), + ('tags', models.ManyToManyField(related_name='gcp_resource', to='data.tag')), + ], + ), + ] diff --git a/apps/gcp/models.py b/apps/gcp/models.py index 71a8362..5b61abc 100644 --- a/apps/gcp/models.py +++ b/apps/gcp/models.py @@ -1,3 +1,70 @@ from django.db import models +from django.conf import settings +from apps.case.models import Case +from django.utils.text import slugify +from apps.data.models import Tag -# Create your models here. +class GCPAccount(models.Model): + case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='gcp_accounts') + project_id = models.CharField(max_length=100, unique=True) + service_account_info = models.JSONField() # Stores the service account key JSON + added_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, related_name='added_gcp_accounts') + added_at = models.DateTimeField(auto_now_add=True) + validated = models.BooleanField(default=False) + + def __str__(self): + return f"GCP Project {self.project_id} for Case {self.case.name}" + +class GCPResource(models.Model): + account = models.ForeignKey('GCPAccount', on_delete=models.CASCADE, related_name='resources') + case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='gcp_resources') + resource_id = models.CharField(max_length=200) + resource_type = models.CharField(max_length=100) # e.g., compute.googleapis.com/Instance + resource_name = models.CharField(max_length=200) + location = models.CharField(max_length=50, blank=True, null=True) # GCP region/zone + resource_details = models.JSONField(blank=True, null=True) + slug = models.SlugField(max_length=255, unique=True, blank=True) + discovered_at = models.DateTimeField(auto_now_add=True) + + tags = models.ManyToManyField(Tag, related_name='gcp_resource') + + def save(self, *args, **kwargs): + if not self.slug: + base_slug = 
slugify(f"{self.resource_type}-{self.resource_name}") + unique_slug = base_slug + num = 1 + while GCPResource.objects.filter(slug=unique_slug).exists(): + unique_slug = f"{base_slug}-{num}" + num += 1 + self.slug = unique_slug + super().save(*args, **kwargs) + + def __str__(self): + return f"{self.resource_type} - {self.resource_name}" + +class GCPLogSource(models.Model): + account = models.ForeignKey('GCPAccount', on_delete=models.CASCADE, related_name='log_sources') + case = models.ForeignKey(Case, on_delete=models.CASCADE, related_name='gcp_log_sources') + service_name = models.CharField(max_length=100) # e.g., CloudAudit.googleapis.com + log_name = models.CharField(max_length=255) + log_details = models.JSONField(blank=True, null=True) + status = models.CharField(max_length=50) + location = models.CharField(max_length=50, blank=True, null=True) + slug = models.SlugField(max_length=255, unique=True, blank=True) + discovered_at = models.DateTimeField(auto_now_add=True) + + tags = models.ManyToManyField(Tag, related_name='gcp_log_source') + + def save(self, *args, **kwargs): + if not self.slug: + base_slug = slugify(f"{self.service_name}-{self.log_name}") + unique_slug = base_slug + num = 1 + while GCPLogSource.objects.filter(slug=unique_slug).exists(): + unique_slug = f"{base_slug}-{num}" + num += 1 + self.slug = unique_slug + super().save(*args, **kwargs) + + def __str__(self): + return f"{self.service_name} - {self.log_name}" diff --git a/apps/gcp/tasks.py b/apps/gcp/tasks.py new file mode 100644 index 0000000..04026d1 --- /dev/null +++ b/apps/gcp/tasks.py @@ -0,0 +1,17 @@ +from celery import shared_task +from .models import GCPAccount +from .utils import pull_gcp_resources, discover_log_sources +import logging + +logger = logging.getLogger(__name__) + +@shared_task +def pull_gcp_resources_task(gcp_account_id): + """Background task to pull GCP resources""" + try: + gcp_account = GCPAccount.objects.get(id=gcp_account_id) + pull_gcp_resources(gcp_account) + 
discover_log_sources(gcp_account) + except Exception as e: + logger.error(f"Error pulling GCP resources: {e}") + raise diff --git a/apps/gcp/urls.py b/apps/gcp/urls.py new file mode 100644 index 0000000..0dada6f --- /dev/null +++ b/apps/gcp/urls.py @@ -0,0 +1,23 @@ +from django.urls import path +from . import views + +app_name = "gcp" + +urlpatterns = [ + path('<slug:slug>/connect/gcp/', views.connect_gcp, name='connect_gcp'), + path('accounts/<str:project_id>/edit/', views.edit_account, name='edit_account'), + path('accounts/<str:project_id>/delete/', views.delete_account, name='delete_account'), + path('accounts/<str:project_id>/resources/', views.account_resources, name='account_resources'), + path('accounts/<str:project_id>/logs/', views.normalized_logs, name='normalized_logs'), + path('accounts/<str:project_id>/pull-resources/', views.pull_resources, name='pull_resources'), + + # Tag management URLs + path('resources/<int:resource_id>/tags/add/', views.add_tag_to_resource, name='add_tag_to_resource'), + path('resources/<int:resource_id>/tags/<int:tag_id>/edit/', views.edit_resource_tag, name='edit_resource_tag'), + path('resources/<int:resource_id>/tags/<int:tag_id>/remove/', views.remove_tag_from_resource, name='remove_tag_from_resource'), + path('logsources/<int:logsource_id>/tags/add/', views.add_tag_to_logsource, name='add_tag_to_logsource'), + path('logsources/<int:logsource_id>/tags/<int:tag_id>/edit/', views.edit_logsource_tag, name='edit_logsource_tag'), + path('logsources/<int:logsource_id>/tags/<int:tag_id>/remove/', views.remove_tag_from_logsource, name='remove_tag_from_logsource'), + path('resources/<slug:slug>/', views.resource_details, name='resource_details'), + path('logsources/<int:logsource_id>/', views.logsource_details, name='logsource_details'), +] \ No newline at end of file diff --git a/apps/gcp/utils.py b/apps/gcp/utils.py new file mode 100644 index 0000000..7587ec8 --- /dev/null +++ b/apps/gcp/utils.py @@ -0,0 +1,196 @@ +from google.oauth2 import service_account +from google.cloud.resourcemanager_v3 import ProjectsClient +from google.cloud import storage +from google.cloud import logging +from google.cloud import compute_v1 +import logging as python_logging +from .models import GCPResource, GCPLogSource
+ +logger = python_logging.getLogger(__name__) + +def validate_gcp_credentials(project_id, service_account_info): + """Validate GCP credentials by attempting to create a client and list resources""" + try: + # Create credentials from service account info + credentials = service_account.Credentials.from_service_account_info( + service_account_info, + scopes=[ + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/logging.read' + ] + ) + + # Verify project access + client = ProjectsClient(credentials=credentials) + try: + project = client.get_project(name=f'projects/{project_id}') + if not project: + return False, "Project not found or no access" + except Exception as e: + error_str = str(e) + logger.error(f"Error accessing project: {error_str}") + + if "SERVICE_DISABLED" in error_str: + if "cloudresourcemanager.googleapis.com" in error_str: + return False, "Cloud Resource Manager API is not enabled. Please enable it in the Google Cloud Console." + elif "storage.googleapis.com" in error_str: + return False, "Cloud Storage API is not enabled. Please enable it in the Google Cloud Console." + elif "logging.googleapis.com" in error_str: + return False, "Cloud Logging API is not enabled. Please enable it in the Google Cloud Console." + elif "compute.googleapis.com" in error_str: + return False, "Compute Engine API is not enabled. Please enable it in the Google Cloud Console." 
+ else: + return False, f"Required API is not enabled: {error_str}" + + return False, f"Project access error: {error_str}" + + # Test Storage access + try: + storage_client = storage.Client(credentials=credentials, project=project_id) + storage_client.list_buckets(max_results=1) + except Exception as e: + logger.warning(f"Storage access test failed: {str(e)}") + # Continue validation even if storage access fails + + # Test Logging access + try: + logging_client = logging.Client(credentials=credentials, project=project_id) + logging_client.list_entries(max_results=1) + except Exception as e: + logger.warning(f"Logging access test failed: {str(e)}") + # Continue validation even if logging access fails + + logger.info(f"Successfully validated GCP credentials for project {project_id}") + return True, None + + except Exception as e: + error_message = str(e) + logger.error(f"GCP credential validation error: {error_message}") + + if "invalid_grant" in error_message.lower(): + return False, "Invalid service account key" + elif "permission denied" in error_message.lower(): + return False, "Permission denied. Please check the service account roles" + elif "project not found" in error_message.lower(): + return False, "Project not found. 
Please check the Project ID" + else: + return False, f"Validation error: {error_message}" + +def serialize_resource_details(resource): + """Serialize resource details, handling non-serializable objects""" + if hasattr(resource, 'to_dict'): + return resource.to_dict() + elif isinstance(resource, dict): + return {key: serialize_resource_details(value) for key, value in resource.items()} + elif isinstance(resource, list): + return [serialize_resource_details(item) for item in resource] + else: + return str(resource) + +def pull_gcp_resources(gcp_account): + """Pull GCP resources for the given account""" + logger.info(f"Pulling GCP resources for project: {gcp_account.project_id}") + + try: + credentials = service_account.Credentials.from_service_account_info( + gcp_account.service_account_info, + scopes=[ + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/compute.readonly' + ] + ) + + # Get Compute Engine instances + try: + instance_client = compute_v1.InstancesClient(credentials=credentials) + request = compute_v1.AggregatedListInstancesRequest( + project=gcp_account.project_id + ) + for zone, response in instance_client.aggregated_list(request=request): + if response.instances: + for instance in response.instances: + GCPResource.objects.update_or_create( + account=gcp_account, + case=gcp_account.case, + resource_id=instance.id, + defaults={ + 'resource_type': 'compute.googleapis.com/Instance', + 'resource_name': instance.name, + 'location': zone.split('/')[-1], + 'resource_details': { + 'machine_type': instance.machine_type, + 'status': instance.status, + 'creation_timestamp': instance.creation_timestamp, + 'network_interfaces': [ + { + 'network': nic.network, + 'subnetwork': nic.subnetwork, + 'internal_ip': nic.network_i_p, + 'external_ip': nic.access_configs[0].nat_i_p if nic.access_configs else None + } for nic in instance.network_interfaces + ] + } + } + ) + except Exception as e: + logger.error(f"Error listing 
Compute Engine instances: {e}") + + # Get Storage buckets + try: + storage_client = storage.Client(credentials=credentials, project=gcp_account.project_id) + for bucket in storage_client.list_buckets(): + GCPResource.objects.update_or_create( + account=gcp_account, + case=gcp_account.case, + resource_id=bucket.id, + defaults={ + 'resource_type': 'storage.googleapis.com/Bucket', + 'resource_name': bucket.name, + 'location': bucket.location, + 'resource_details': { + 'storage_class': bucket.storage_class, + 'created': bucket.time_created.isoformat() if bucket.time_created else None, + 'updated': bucket.updated.isoformat() if bucket.updated else None, + 'versioning_enabled': bucket.versioning_enabled + } + } + ) + except Exception as e: + logger.error(f"Error listing Storage buckets: {e}") + + except Exception as e: + logger.error(f"Error pulling GCP resources: {e}") + raise + +def discover_log_sources(gcp_account): + """Discover available log sources in the GCP project""" + logger.info(f"Discovering log sources for project: {gcp_account.project_id}") + + try: + credentials = service_account.Credentials.from_service_account_info( + gcp_account.service_account_info, + scopes=['https://www.googleapis.com/auth/logging.read'] + ) + + logging_client = logging.Client(credentials=credentials, project=gcp_account.project_id) + + # List all log entries to discover available log types + for entry in logging_client.list_entries(page_size=1000, order_by=logging.DESCENDING): + GCPLogSource.objects.update_or_create( + account=gcp_account, + case=gcp_account.case, + service_name=entry.resource.type, + log_name=entry.log_name, + defaults={ + 'status': 'Enabled', + 'log_details': { + 'resource_type': entry.resource.type, + 'severity': entry.severity + } + } + ) + + except Exception as e: + logger.error(f"Error discovering log sources: {e}") + raise diff --git a/apps/gcp/views.py b/apps/gcp/views.py index 91ea44a..96ab23c 100644 --- a/apps/gcp/views.py +++ b/apps/gcp/views.py @@ -1,3 
+1,264 @@ -from django.shortcuts import render +from django.shortcuts import render, redirect, get_object_or_404 +from django.contrib.auth.decorators import login_required +from django.contrib import messages +from apps.case.models import Case +from .models import GCPAccount, GCPResource, GCPLogSource +from apps.data.models import NormalizedLog +from .forms import GCPAccountForm +from .utils import validate_gcp_credentials +from datetime import datetime, timedelta +from django.utils import timezone +from apps.data.models import Tag # Create your views here. + +@login_required +def connect_gcp(request, slug): + """Connect a GCP project to a case.""" + case = get_object_or_404(Case, slug=slug) + + if request.method == "POST": + form = GCPAccountForm(request.POST, request.FILES) + if form.is_valid(): + gcp_account = form.save(commit=False) + gcp_account.case = case + gcp_account.added_by = request.user + + # Validate credentials + is_valid, error_message = validate_gcp_credentials( + project_id=gcp_account.project_id, + service_account_info=gcp_account.service_account_info + ) + gcp_account.validated = is_valid + gcp_account.save() + + if is_valid: + messages.success(request, "GCP project connected successfully!") + else: + messages.error(request, f"GCP project saved, but validation failed: {error_message}") + + return redirect('case:case_detail', slug=case.slug) + else: + form = GCPAccountForm() + + return render(request, 'gcp/connect_gcp.html', {'form': form, 'case': case}) + +@login_required +def edit_account(request, project_id): + """Edit an existing GCP account.""" + account = get_object_or_404(GCPAccount, project_id=project_id) + + if request.method == "POST": + form = GCPAccountForm(request.POST, request.FILES, instance=account) + if form.is_valid(): + gcp_account = form.save(commit=False) + + # Re-validate credentials + is_valid, error_message = validate_gcp_credentials( + project_id=gcp_account.project_id, + 
service_account_info=gcp_account.service_account_info + ) + + gcp_account.validated = is_valid + gcp_account.save() + + if is_valid: + messages.success(request, "GCP project updated and credentials validated successfully!") + else: + messages.error(request, f"GCP project updated, but validation failed: {error_message}") + + return redirect('case:case_detail', slug=account.case.slug) + else: + form = GCPAccountForm(instance=account) + + return render(request, 'gcp/edit_account.html', {'form': form, 'account': account}) + +@login_required +def delete_account(request, project_id): + """Delete a GCP account.""" + account = get_object_or_404(GCPAccount, project_id=project_id) + slug = account.case.slug + account.delete() + messages.success(request, "GCP project disconnected successfully.") + return redirect('case:case_detail', slug=slug) + +@login_required +def account_resources(request, project_id): + """Display GCP resources and log sources for an account.""" + gcp_account = get_object_or_404(GCPAccount, project_id=project_id) + case = gcp_account.case + + # Group resources by their type + resources = GCPResource.objects.filter(account=gcp_account).order_by('resource_type', 'resource_name') + grouped_resources = {} + for resource in resources: + grouped_resources.setdefault(resource.resource_type, []).append(resource) + + # Group log sources by service + log_sources = GCPLogSource.objects.filter(account=gcp_account).order_by('service_name', 'log_name') + grouped_log_sources = {} + for log_source in log_sources: + grouped_log_sources.setdefault(log_source.service_name, []).append(log_source) + + # Add error messages if applicable + error_messages = [] + if not resources.exists(): + error_messages.append("No GCP resources found for this project.") + if not log_sources.exists(): + error_messages.append("No GCP log sources found for this project.") + + context = { + 'gcp_account': gcp_account, + 'case': case, + 'grouped_resources': grouped_resources, + 
'grouped_log_sources': grouped_log_sources, + 'error_messages': error_messages, + 'all_tags': Tag.objects.all(), + } + return render(request, 'gcp/account_resources.html', context) + +@login_required +def normalized_logs(request, project_id): + """Display normalized logs for a GCP project.""" + gcp_account = get_object_or_404(GCPAccount, project_id=project_id) + + # Get date range from request + start_date = request.GET.get('start_date') + end_date = request.GET.get('end_date') + + # Set default date range if not provided (last 7 days) + if not start_date: + start_date = (timezone.now() - timedelta(days=7)).strftime('%Y-%m-%d') + if not end_date: + end_date = timezone.now().strftime('%Y-%m-%d') + + # Convert to datetime objects + start_datetime = datetime.strptime(start_date, '%Y-%m-%d') + end_datetime = datetime.strptime(end_date, '%Y-%m-%d') + timedelta(days=1) + + # Make datetime objects timezone-aware + start_datetime = timezone.make_aware(start_datetime) + end_datetime = timezone.make_aware(end_datetime) + + # Query logs + logs = NormalizedLog.objects.filter( + case=gcp_account.case, + event_source='gcp', + event_time__gte=start_datetime, + event_time__lt=end_datetime, + gcp_account=gcp_account + ).order_by('-event_time') + + context = { + "gcp_account": gcp_account, + "logs": logs, + "start_date": start_date, + "end_date": end_date, + } + return render(request, "gcp/normalized_logs.html", context) + +@login_required +def pull_resources(request, project_id): + """Pull latest resources from GCP project.""" + gcp_account = get_object_or_404(GCPAccount, project_id=project_id) + + try: + # Import here to avoid circular imports + from .tasks import pull_gcp_resources_task + pull_gcp_resources_task.delay(gcp_account.id) + messages.success(request, "Resource refresh started. This may take a few minutes.") + except Exception as e: + logger.error(f"Error starting resource pull: {e}") + messages.error(request, "Error starting resource refresh. 
Please try again.") + + return redirect('gcp:account_resources', project_id=project_id) + +@login_required +def add_tag_to_resource(request, resource_id): + """Add a tag to a GCP resource.""" + resource = get_object_or_404(GCPResource, id=resource_id) + if request.method == "POST": + tag_id = request.POST.get('tag_id') + tag = get_object_or_404(Tag, id=tag_id) + resource.tags.add(tag) + messages.success(request, f"Tag '{tag.name}' added to resource.") + return redirect('gcp:account_resources', project_id=resource.account.project_id) + +@login_required +def edit_resource_tag(request, resource_id, tag_id): + """Edit a tag on a GCP resource.""" + resource = get_object_or_404(GCPResource, id=resource_id) + old_tag = get_object_or_404(Tag, id=tag_id) + if request.method == "POST": + new_tag_id = request.POST.get('new_tag_id') + new_tag = get_object_or_404(Tag, id=new_tag_id) + resource.tags.remove(old_tag) + resource.tags.add(new_tag) + messages.success(request, f"Tag updated from '{old_tag.name}' to '{new_tag.name}'.") + return redirect('gcp:account_resources', project_id=resource.account.project_id) + +@login_required +def remove_tag_from_resource(request, resource_id, tag_id): + """Remove a tag from a GCP resource.""" + resource = get_object_or_404(GCPResource, id=resource_id) + tag = get_object_or_404(Tag, id=tag_id) + resource.tags.remove(tag) + messages.success(request, f"Tag '{tag.name}' removed from resource.") + return redirect('gcp:account_resources', project_id=resource.account.project_id) + +@login_required +def add_tag_to_logsource(request, logsource_id): + """Add a tag to a GCP log source.""" + log_source = get_object_or_404(GCPLogSource, id=logsource_id) + if request.method == "POST": + tag_id = request.POST.get('tag_id') + tag = get_object_or_404(Tag, id=tag_id) + log_source.tags.add(tag) + messages.success(request, f"Tag '{tag.name}' added to log source.") + return redirect('gcp:account_resources', project_id=log_source.account.project_id) + 
+@login_required +def edit_logsource_tag(request, logsource_id, tag_id): + """Edit a tag on a GCP log source.""" + log_source = get_object_or_404(GCPLogSource, id=logsource_id) + old_tag = get_object_or_404(Tag, id=tag_id) + if request.method == "POST": + new_tag_id = request.POST.get('new_tag_id') + new_tag = get_object_or_404(Tag, id=new_tag_id) + log_source.tags.remove(old_tag) + log_source.tags.add(new_tag) + messages.success(request, f"Tag updated from '{old_tag.name}' to '{new_tag.name}'.") + return redirect('gcp:account_resources', project_id=log_source.account.project_id) + +@login_required +def remove_tag_from_logsource(request, logsource_id, tag_id): + """Remove a tag from a GCP log source.""" + log_source = get_object_or_404(GCPLogSource, id=logsource_id) + tag = get_object_or_404(Tag, id=tag_id) + log_source.tags.remove(tag) + messages.success(request, f"Tag '{tag.name}' removed from log source.") + return redirect('gcp:account_resources', project_id=log_source.account.project_id) + +@login_required +def resource_details(request, slug): + """Display detailed information about a GCP resource.""" + resource = get_object_or_404(GCPResource, slug=slug) + account = resource.account + + context = { + 'resource': resource, + 'account': account, + } + return render(request, 'gcp/resource_details.html', context) + +@login_required +def logsource_details(request, logsource_id): + """Display detailed information about a GCP log source.""" + log_source = get_object_or_404(GCPLogSource, id=logsource_id) + account = log_source.account + + context = { + 'log_source': log_source, + 'account': account, + } + return render(request, 'gcp/logsource_details.html', context) diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt index 880ab10..37a38fd 100644 --- a/requirements/dev-requirements.txt +++ b/requirements/dev-requirements.txt @@ -1,38 +1,11 @@ # This file was autogenerated by uv via the following command: # uv pip compile 
--no-emit-package setuptools --no-strip-extras requirements/dev-requirements.in -o requirements/dev-requirements.txt -arrow==1.3.0 - # via cookiecutter -binaryornot==0.4.4 - # via cookiecutter build==1.2.2.post1 # via pip-tools -certifi==2024.8.30 - # via - # -c requirements/requirements.txt - # requests -chardet==5.2.0 - # via binaryornot -charset-normalizer==3.4.0 - # via - # -c requirements/requirements.txt - # requests click==8.1.7 # via # -c requirements/requirements.txt - # cookiecutter # pip-tools -idna==3.10 - # via - # -c requirements/requirements.txt - # requests -jinja2==3.1.4 - # via cookiecutter -markdown-it-py==3.0.0 - # via rich -markupsafe==3.0.2 - # via jinja2 -mdurl==0.1.2 - # via markdown-it-py packaging==24.2 # via # -c requirements/requirements.txt @@ -41,40 +14,10 @@ pip==24.3.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/dev-requirements.in -pygments==2.18.0 - # via rich pyproject-hooks==1.2.0 # via # build # pip-tools -python-dateutil==2.9.0.post0 - # via - # -c requirements/requirements.txt - # arrow -python-slugify==8.0.4 - # via cookiecutter -pyyaml==6.0.2 - # via - # -c requirements/requirements.txt - # cookiecutter -requests==2.32.3 - # via - # -c requirements/requirements.txt - # cookiecutter -rich==13.9.4 - # via cookiecutter -six==1.16.0 - # via - # -c requirements/requirements.txt - # python-dateutil -text-unidecode==1.3 - # via python-slugify -types-python-dateutil==2.9.0.20241003 - # via arrow -urllib3==2.2.3 - # via - # -c requirements/requirements.txt - # requests wheel==0.45.0 # via pip-tools diff --git a/requirements/requirements.in b/requirements/requirements.in index 00f6732..f6e875a 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -1,4 +1,3 @@ -boto3 Django django-allauth[socialaccount,mfa] django-htmx @@ -10,7 +9,7 @@ djangorestframework djangorestframework-api-key django-health-check drf-spectacular -django-anymail[mailgun] # email services +django-anymail[mailgun] django-storages 
celery-progress celery @@ -18,3 +17,21 @@ celery[redis] django-celery-beat psycopg2-binary whitenoise[brotli] + +#AWS +boto3 + +#Azure +azure-identity +azure-mgmt-resource +azure-mgmt-monitor +azure-mgmt-loganalytics +azure-graphrbac +msrestazure + +#GCP +google-cloud-resource-manager +google-cloud-storage +google-cloud-logging +google-cloud-compute +google-auth \ No newline at end of file diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 4d7f7f0..47f1742 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,5 +1,7 @@ # This file was autogenerated by uv via the following command: # uv pip compile --no-emit-package setuptools --no-strip-extras requirements/requirements.in -o requirements/requirements.txt +adal==1.2.7 + # via msrestazure amqp==5.3.1 # via kombu asgiref==3.8.1 @@ -11,6 +13,32 @@ attrs==24.2.0 # via # jsonschema # referencing +azure-common==1.1.28 + # via + # azure-graphrbac + # azure-mgmt-loganalytics + # azure-mgmt-monitor + # azure-mgmt-resource +azure-core==1.32.0 + # via + # azure-identity + # azure-mgmt-core + # msrest +azure-graphrbac==0.61.2 + # via -r requirements/requirements.in +azure-identity==1.20.0 + # via -r requirements/requirements.in +azure-mgmt-core==1.5.0 + # via + # azure-mgmt-loganalytics + # azure-mgmt-monitor + # azure-mgmt-resource +azure-mgmt-loganalytics==12.0.0 + # via -r requirements/requirements.in +azure-mgmt-monitor==6.0.2 + # via -r requirements/requirements.in +azure-mgmt-resource==23.2.0 + # via -r requirements/requirements.in billiard==4.2.1 # via celery boto3==1.35.60 @@ -21,6 +49,8 @@ botocore==1.35.60 # s3transfer brotli==1.1.0 # via whitenoise +cachetools==5.5.2 + # via google-auth celery[redis]==5.4.0 # via # -r requirements/requirements.in @@ -28,7 +58,9 @@ celery[redis]==5.4.0 celery-progress==0.4 # via -r requirements/requirements.in certifi==2024.8.30 - # via requests + # via + # msrest + # requests cffi==1.17.1 # via cryptography 
charset-normalizer==3.4.0 @@ -49,8 +81,13 @@ cron-descriptor==1.4.5 # via django-celery-beat cryptography==43.0.3 # via + # adal + # azure-identity # fido2 + # msal # pyjwt +deprecated==1.2.18 + # via opentelemetry-api django==5.1.3 # via # -r requirements/requirements.in @@ -98,10 +135,75 @@ drf-spectacular==0.27.2 # via -r requirements/requirements.in fido2==1.1.3 # via django-allauth +google-api-core[grpc]==2.24.1 + # via + # google-cloud-appengine-logging + # google-cloud-compute + # google-cloud-core + # google-cloud-logging + # google-cloud-resource-manager + # google-cloud-storage +google-auth==2.38.0 + # via + # -r requirements/requirements.in + # google-api-core + # google-cloud-appengine-logging + # google-cloud-compute + # google-cloud-core + # google-cloud-logging + # google-cloud-resource-manager + # google-cloud-storage +google-cloud-appengine-logging==1.6.0 + # via google-cloud-logging +google-cloud-audit-log==0.3.0 + # via google-cloud-logging +google-cloud-compute==1.26.0 + # via -r requirements/requirements.in +google-cloud-core==2.4.2 + # via + # google-cloud-logging + # google-cloud-storage +google-cloud-logging==3.11.4 + # via -r requirements/requirements.in +google-cloud-resource-manager==1.14.1 + # via -r requirements/requirements.in +google-cloud-storage==3.0.0 + # via -r requirements/requirements.in +google-crc32c==1.6.0 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 + # via google-cloud-storage +googleapis-common-protos[grpc]==1.68.0 + # via + # google-api-core + # google-cloud-audit-log + # grpc-google-iam-v1 + # grpcio-status +grpc-google-iam-v1==0.14.0 + # via + # google-cloud-logging + # google-cloud-resource-manager +grpcio==1.70.0 + # via + # google-api-core + # googleapis-common-protos + # grpc-google-iam-v1 + # grpcio-status +grpcio-status==1.70.0 + # via google-api-core idna==3.10 # via requests +importlib-metadata==8.5.0 + # via opentelemetry-api inflection==0.5.1 # via drf-spectacular 
+isodate==0.7.2 + # via + # azure-mgmt-monitor + # azure-mgmt-resource + # msrest jmespath==1.0.1 # via # boto3 @@ -112,22 +214,70 @@ jsonschema-specifications==2024.10.1 # via jsonschema kombu==5.4.2 # via celery +msal==1.31.1 + # via + # azure-identity + # msal-extensions +msal-extensions==1.2.0 + # via azure-identity +msrest==0.7.1 + # via + # azure-graphrbac + # azure-mgmt-loganalytics + # msrestazure +msrestazure==0.6.4.post1 + # via + # -r requirements/requirements.in + # azure-graphrbac oauthlib==3.2.2 # via requests-oauthlib +opentelemetry-api==1.30.0 + # via google-cloud-logging packaging==24.2 # via djangorestframework-api-key +portalocker==2.10.1 + # via msal-extensions prompt-toolkit==3.0.48 # via click-repl +proto-plus==1.26.0 + # via + # google-api-core + # google-cloud-appengine-logging + # google-cloud-compute + # google-cloud-logging + # google-cloud-resource-manager +protobuf==5.29.3 + # via + # google-api-core + # google-cloud-appengine-logging + # google-cloud-audit-log + # google-cloud-compute + # google-cloud-logging + # google-cloud-resource-manager + # googleapis-common-protos + # grpc-google-iam-v1 + # grpcio-status + # proto-plus psycopg2-binary==2.9.10 # via -r requirements/requirements.in +pyasn1==0.6.1 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 + # via google-auth pycparser==2.22 # via cffi pyjwt[crypto]==2.9.0 - # via django-allauth + # via + # adal + # django-allauth + # msal python-crontab==3.2.0 # via django-celery-beat python-dateutil==2.9.0.post0 # via + # adal # botocore # celery # python-crontab @@ -143,21 +293,39 @@ referencing==0.35.1 # jsonschema-specifications requests==2.32.3 # via + # adal + # azure-core # django-allauth # django-anymail + # google-api-core + # google-cloud-storage + # msal + # msrest # requests-oauthlib requests-oauthlib==2.0.0 - # via django-allauth + # via + # django-allauth + # msrest rpds-py==0.21.0 # via # jsonschema # referencing +rsa==4.9 + # via google-auth s3transfer==0.10.3 # via 
boto3 six==1.16.0 - # via python-dateutil + # via + # azure-core + # msrestazure + # python-dateutil sqlparse==0.5.2 # via django +typing-extensions==4.12.2 + # via + # azure-core + # azure-identity + # azure-mgmt-resource tzdata==2024.2 # via # celery @@ -179,3 +347,7 @@ wcwidth==0.2.13 # via prompt-toolkit whitenoise[brotli]==6.8.2 # via -r requirements/requirements.in +wrapt==1.17.2 + # via deprecated +zipp==3.21.0 + # via importlib-metadata diff --git a/scope/urls.py b/scope/urls.py index 6bc3874..14b7b68 100644 --- a/scope/urls.py +++ b/scope/urls.py @@ -38,12 +38,11 @@ path("", include("apps.web.urls")), path("case/", include("apps.case.urls")), path("aws/", include("apps.aws.urls")), + path("azure/", include("apps.azure.urls")), path("data/", include("apps.data.urls")), path("analysis/", include("apps.analysis.urls")), + path("gcp/", include("apps.gcp.urls")), path("celery-progress/", include("celery_progress.urls")), # API docs path("api/schema/", SpectacularAPIView.as_view(), name="schema"), - # Optional UI - you may wish to remove one of these depending on your preference - path("api/schema/swagger-ui/", SpectacularSwaggerView.as_view(url_name="schema"), name="swagger-ui"), - path("api/schema/redoc/", SpectacularRedocView.as_view(url_name="schema"), name="redoc"), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) diff --git a/static/images/cloud/azure.svg b/static/images/cloud/azure.svg new file mode 100644 index 0000000..ff5dfa5 --- /dev/null +++ b/static/images/cloud/azure.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/static/images/cloud/gcp.svg b/static/images/cloud/gcp.svg new file mode 100644 index 0000000..2516a83 --- /dev/null +++ b/static/images/cloud/gcp.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/templates/analysis/case_detections.html b/templates/analysis/case_detections.html index f9f8410..03f26b2 100644 --- 
a/templates/analysis/case_detections.html +++ b/templates/analysis/case_detections.html @@ -59,6 +59,34 @@

Total Matches

+ +
+
+

Filters

+
+
+
+
+ + +
+
+ +
+
+
+
+ {% if results_by_detection %} {% for detection, results in results_by_detection.items %} diff --git a/templates/azure/account_resources.html b/templates/azure/account_resources.html new file mode 100644 index 0000000..f3b2409 --- /dev/null +++ b/templates/azure/account_resources.html @@ -0,0 +1,272 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} + +{% block app %} +
+
+ +
+
+

Azure Account Overview

+

Subscription: {{ azure_account.subscription_id }}

+
+ +
+ + +
+
+

Azure Resources

+
+
+ {% if grouped_resources %} + {% for resource_type, resources in grouped_resources.items %} +

{{ resource_type }}

+
+ {% for resource in resources %} +
+
+
+
+

+ {{ resource.resource_name|default:resource.resource_id }} +

+
+ {% for tag in resource.tags.all %} + + {{ tag.name }} + + {% empty %} + + {% endfor %} +
+
+

Resource Group: {{ resource.resource_group }}

+

Location: {{ resource.location }}

+ + View Details + +
+
+
+ {% endfor %} +
+ {% endfor %} + {% else %} +
+

No resources found. Would you like to pull resources?

+ + Generate Overview + +
+ {% endif %} +
+
+ + +
+
+

Azure Log Sources

+
+
+ {% for message in error_messages %} +
{{ message }}
+ {% endfor %} + + {% if grouped_log_sources %} + {% for service_name, log_sources in grouped_log_sources.items %} +

{{ service_name }}

+
+ {% for log_source in log_sources %} +
+
+
+
+

{{ log_source.log_name }}

+
+ {% for tag in log_source.tags.all %} + + {{ tag.name }} + + {% empty %} + + {% endfor %} +
+
+
+

Location: {{ log_source.location|default:"Global" }}

+

Status: {{ log_source.status }}

+
+ + View Details + +
+
+
+ {% endfor %} +
+ {% endfor %} + {% else %} +
+

No log sources found.

+
+ {% endif %} +
+
+
+
+ + + +{% for resource_type, resources in grouped_resources.items %} + {% for resource in resources %} + + {% endfor %} +{% endfor %} + + +{% for service_name, log_sources in grouped_log_sources.items %} + {% for log_source in log_sources %} + + {% endfor %} +{% endfor %} + + +{% for resource_type, resources in grouped_resources.items %} + {% for resource in resources %} + {% for tag in resource.tags.all %} + + {% endfor %} + {% endfor %} +{% endfor %} + + +{% for service_name, log_sources in grouped_log_sources.items %} + {% for log_source in log_sources %} + {% for tag in log_source.tags.all %} + + {% endfor %} + {% endfor %} +{% endfor %} + +{% endblock %} diff --git a/templates/azure/connect_azure.html b/templates/azure/connect_azure.html new file mode 100644 index 0000000..3067aba --- /dev/null +++ b/templates/azure/connect_azure.html @@ -0,0 +1,102 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} + +
+
+ +
+
+

Connect Azure Account

+

{{ case.name }}

+
+ + Back to Case + +
+ + +
+
+
+ {% csrf_token %} + + {% for field in form %} +
+ + {{ field }} + {% if field.help_text %} +
{{ field.help_text }}
+ {% endif %} + {% if field.errors %} + {% for error in field.errors %} +
{{ error }}
+ {% endfor %} + {% endif %} +
+ {% endfor %} + +
+ Cancel + +
+
+
+
+ + +
+
+
How to get these credentials?
+
    +
  1. Go to the Azure Portal and create a new App Registration in Azure Active Directory
  2. +
  3. Note down the Application (client) ID and Directory (tenant) ID
  4. +
  5. Create a new client secret and save it securely
  6. +
  7. Add API permissions in the App Registration: +
      +
    • Microsoft Graph API: +
        +
      • Directory.Read.All - Read directory data
      • +
      • User.Read.All - Read all users' profiles
      • +
      • AuditLog.Read.All - Read audit log data
      • +
      • SecurityEvents.Read.All - Read security events
      • +
      +
    • +
    • Click "Grant admin consent" for these permissions
    • +
    +
  8. +
  9. Assign RBAC roles at the subscription level (IAM): +
      +
    • Search for your App Registration name
    • +
    • Add role assignments: +
        +
      • Reader
      • +
      • Log Analytics Reader
      • +
      • Security Reader
      • +
      +
    • +
    +
  10. +
+
+ These permissions provide read-only access suitable for incident response and investigation purposes. +
+
+ Remember to click "Grant admin consent" in API permissions after adding them. +
+ + Learn more about creating an Azure service principal + +
+
+
+
+ +{% endblock %} diff --git a/templates/azure/edit_account.html b/templates/azure/edit_account.html new file mode 100644 index 0000000..e7e82b3 --- /dev/null +++ b/templates/azure/edit_account.html @@ -0,0 +1,75 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} + +
+
+ +
+
+

Edit Azure Account

+

Subscription: {{ account.subscription_id }}

+
+ + Back to Case + +
+ + +
+
+
+ {% csrf_token %} + + {% for field in form %} +
+ + {{ field }} + {% if field.help_text %} +
{{ field.help_text }}
+ {% endif %} + {% if field.errors %} + {% for error in field.errors %} +
{{ error }}
+ {% endfor %} + {% endif %} +
+ {% endfor %} + +
+ Cancel + +
+
+
+
+ + +
+
+
Need to update credentials?
+
    +
  1. Go to Azure Active Directory → App registrations → Your app
  2. +
  3. Find the Application (client) ID and Directory (tenant) ID in Overview
  4. +
  5. To create a new client secret: +
      +
    • Go to Certificates & secrets
    • +
    • Click "New client secret"
    • +
    • Copy the secret value immediately (it won't be shown again)
    • +
    +
  6. +
+
+ Remember to update any role assignments if you create a new app registration. +
+
+
+
+
+ +{% endblock %} diff --git a/templates/azure/get_logs.html b/templates/azure/get_logs.html new file mode 100644 index 0000000..b275721 --- /dev/null +++ b/templates/azure/get_logs.html @@ -0,0 +1,126 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} + +{% block app %} +
+
+ +
+
+

Azure Activity Logs

+

Subscription: {{ azure_account.subscription_id }}

+
+ +
+ + +
+
+

Date Range

+
+
+
+
+
+ + +
+
+ + +
+
+ +
+
+
+
+
+ + +
+ +
+
+
+

Top Users

+
+
+
+ {% for user in top_users %} +
+ {{ user.user_identity|default:"Unknown User" }} + {{ user.count }} +
+ {% empty %} +
+ No users found +
+ {% endfor %} +
+
+
+
+ + +
+
+
+

Top IP Addresses

+
+
+
+ {% for ip in top_ips %} +
+ {{ ip.ip_address|default:"Unknown IP" }} + {{ ip.count }} +
+ {% empty %} +
+ No IP addresses found +
+ {% endfor %} +
+
+
+
+ + +
+
+
+

Top Operations

+
+
+
+ {% for event in top_events %} +
+ {{ event.event_name|default:"Unknown Operation" }} + {{ event.count }} +
+ {% empty %} +
+ No operations found +
+ {% endfor %} +
+
+
+
+
+
+
+{% endblock %} diff --git a/templates/azure/logsource_details.html b/templates/azure/logsource_details.html new file mode 100644 index 0000000..fb4f471 --- /dev/null +++ b/templates/azure/logsource_details.html @@ -0,0 +1,84 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} +
+
+ +
+
+

Log Source Details

+

{{ log_source.log_name }}

+
+ + Back to Account + +
+ + +
+
+

Basic Information

+
+
+
+
Service Name
+
{{ log_source.service_name }}
+ +
Status
+
{{ log_source.status }}
+ +
Location
+
{{ log_source.location|default:"Global" }}
+ +
Discovered
+
{{ log_source.discovered_at|date:"M d, Y H:i" }}
+
+
+
+ + +
+
+

Log Source Details

+
+
+ {% if log_source.log_details %} + {% for key, value in log_source.log_details.items %} +
+

{{ key }}

+ {% if value is iterable and value.items %} + +
+ {% for nested_key, nested_value in value.items %} +
{{ nested_key }}
+
+ {% if nested_value is iterable and nested_value.items %} + +
+ {% for sub_key, sub_value in nested_value.items %} +
{{ sub_key }}
+
{{ sub_value }}
+ {% endfor %} +
+ {% else %} + {{ nested_value }} + {% endif %} +
+ {% endfor %} +
+ {% elif value is iterable and value|length > 50 %} +
{{ value }}
+ {% else %} +

{{ value }}

+ {% endif %} +
+ {% endfor %} + {% else %} +

No additional details available for this log source.

+ {% endif %} +
+
+
+
+{% endblock %} diff --git a/templates/azure/resource_details.html b/templates/azure/resource_details.html new file mode 100644 index 0000000..2356eed --- /dev/null +++ b/templates/azure/resource_details.html @@ -0,0 +1,89 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} +
+
+ +
+
+

Resource Details

+

{{ resource.resource_name|default:resource.resource_id }}

+
+ + Back to Account + +
+ + +
+
+

Basic Information

+
+
+
+
Resource Type
+
{{ resource.resource_type }}
+ +
Resource Group
+
{{ resource.resource_group }}
+ +
Location
+
{{ resource.location }}
+ +
Resource ID
+
+ {{ resource.resource_id }} +
+ +
Discovered
+
{{ resource.discovered_at|date:"M d, Y H:i" }}
+
+
+
+ + +
+
+

Resource Details

+
+
+ {% if resource.resource_details %} + {% for key, value in resource.resource_details.items %} +
+

{{ key }}

+ {% if value is iterable and value.items %} + +
+ {% for nested_key, nested_value in value.items %} +
{{ nested_key }}
+
+ {% if nested_value is iterable and nested_value.items %} + +
+ {% for sub_key, sub_value in nested_value.items %} +
{{ sub_key }}
+
{{ sub_value }}
+ {% endfor %} +
+ {% else %} + {{ nested_value }} + {% endif %} +
+ {% endfor %} +
+ {% elif value is iterable and value|length > 50 %} +
{{ value }}
+ {% else %} +

{{ value }}

+ {% endif %} +
+ {% endfor %} + {% else %} +

No additional details available for this resource.

+ {% endif %} +
+
+
+
+{% endblock %} diff --git a/templates/case/case_detail.html b/templates/case/case_detail.html index 3bdb41e..30b6984 100644 --- a/templates/case/case_detail.html +++ b/templates/case/case_detail.html @@ -92,7 +92,98 @@

AWS Account: {{ account.account_id }}

{% endfor %} - {% else %} + {% endif %} + + {% if azure_accounts %} +
+ {% for account in azure_accounts %} +
+
+
+
+ Azure +

Azure Subscription: {{ account.subscription_id }}

+ {% if account.validated %} + Validated + {% else %} + Not Validated + {% endif %} +
+

+ Tenant: {{ account.tenant_id }} | + Added by {{ account.added_by.username }} on {{ account.added_at|date:"M d, Y" }} +

+
+ +
+ + + + + +
+
+
+ {% endfor %} +
+ {% endif %} + + {% if gcp_accounts %} +
+ {% for account in gcp_accounts %} +
+
+
+
+ GCP +

GCP Project: {{ account.project_id }}

+ {% if account.validated %} + Validated + {% else %} + Not Validated + {% endif %} +
+

+ Added by {{ account.added_by.username }} on {{ account.added_at|date:"M d, Y" }} +

+
+ +
+ + + + + +
+
+
+ {% endfor %} +
+ {% endif %} + + {% if not aws_accounts and not azure_accounts and not gcp_accounts %}

No accounts connected yet. Use "Connect Client" to add accounts.

diff --git a/templates/case/connect_client.html b/templates/case/connect_client.html index cfd0fe2..8265902 100644 --- a/templates/case/connect_client.html +++ b/templates/case/connect_client.html @@ -22,7 +22,9 @@

Connect Client

- + + + + + +
+
+
+ +

Google Cloud Platform

+ + Connect GCP + +
+
+
diff --git a/templates/data/normalized_logs.html b/templates/data/normalized_logs.html index a011b7c..1f5857c 100644 --- a/templates/data/normalized_logs.html +++ b/templates/data/normalized_logs.html @@ -9,9 +9,19 @@

Log Explorer

- - Back to Account - + {% if selected_aws_account %} + + Back to AWS Account + + {% elif selected_azure_account %} + + Back to Azure Account + + {% elif case %} + + Back to Case + + {% endif %} @@ -57,6 +67,18 @@

Search & Filters

+
+ + +
+ {% endfor %} +
+ +

Resource Type: {{ resource.resource_type }}

+

Location: {{ resource.location|default:"Global" }}

+ + View Details + + + + + {% endfor %} + + {% endfor %} + {% else %} +
+

No resources found. Would you like to pull resources?

+ + Generate Overview + +
+ {% endif %} + + + + +
+
+

GCP Log Sources

+
+
+ {% for message in error_messages %} +
{{ message }}
+ {% endfor %} + + {% if grouped_log_sources %} + {% for service_name, log_sources in grouped_log_sources.items %} +

{{ service_name }}

+
+ {% for log_source in log_sources %} +
+
+
+
+

{{ log_source.log_name }}

+
+ {% for tag in log_source.tags.all %} + + {{ tag.name }} + + {% empty %} + + {% endfor %} +
+
+
+

Location: {{ log_source.location|default:"Global" }}

+

Status: {{ log_source.status }}

+
+ + View Details + +
+
+
+ {% endfor %} +
+ {% endfor %} + {% else %} +
+

No log sources found.

+
+ {% endif %} +
+
+ + + + + +{% for resource_type, resources in grouped_resources.items %} + {% for resource in resources %} + + {% endfor %} +{% endfor %} + + +{% for service_name, log_sources in grouped_log_sources.items %} + {% for log_source in log_sources %} + + {% endfor %} +{% endfor %} + + +{% for resource_type, resources in grouped_resources.items %} + {% for resource in resources %} + {% for tag in resource.tags.all %} + + {% endfor %} + {% endfor %} +{% endfor %} + + +{% for service_name, log_sources in grouped_log_sources.items %} + {% for log_source in log_sources %} + {% for tag in log_source.tags.all %} + + {% endfor %} + {% endfor %} +{% endfor %} + +{% endblock %} \ No newline at end of file diff --git a/templates/gcp/connect_gcp.html b/templates/gcp/connect_gcp.html new file mode 100644 index 0000000..7d64c95 --- /dev/null +++ b/templates/gcp/connect_gcp.html @@ -0,0 +1,111 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} + +
+
+ +
+
+

Connect GCP Project

+

{{ case.name }}

+
+ + Back to Case + +
+ + +
+
+
+ {% csrf_token %} + + {% for field in form %} +
+ + {{ field }} + {% if field.help_text %} +
{{ field.help_text }}
+ {% endif %} + {% if field.errors %} + {% for error in field.errors %} +
{{ error }}
+ {% endfor %} + {% endif %} +
+ {% endfor %} + +
+ Cancel + +
+
+
+
+ + +
+
+
How to get these credentials?
+
    +
  1. Go to the Google Cloud Console
  2. +
  3. Select your project
  4. +
  5. Enable required APIs: +
      +
    • Go to APIs & Services > Library
    • +
    • Search for and enable these APIs: +
        +
      • Cloud Resource Manager API
      • +
      • Cloud Storage API
      • +
      • Cloud Logging API
      • +
      • Compute Engine API
      • +
      +
    • +
    • Wait a few minutes after enabling the APIs
    • +
    +
  6. +
  7. Create service account: +
      +
    • Go to IAM & Admin > Service Accounts
    • +
    • Click "Create Service Account"
    • +
    • Add the following roles: +
        +
      • Viewer
      • +
      • Security Reviewer
      • +
      • Logs Viewer
      • +
      +
    • +
    • Create and download a new key (JSON format)
    • +
    +
  8. +
+
+ These permissions provide read-only access suitable for incident response and investigation purposes. +
+
+ Keep your service account key secure and never share it with unauthorized parties. +
+ +
+
+
+
+ +{% endblock %} \ No newline at end of file diff --git a/templates/gcp/edit_account.html b/templates/gcp/edit_account.html new file mode 100644 index 0000000..1a5a5a4 --- /dev/null +++ b/templates/gcp/edit_account.html @@ -0,0 +1,103 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} + +
+
+ +
+
+

Edit GCP Project

+

{{ account.project_id }}

+
+ + Back to Case + +
+ + +
+
+
+ {% csrf_token %} + + {% for field in form %} +
+ + {{ field }} + {% if field.help_text %} +
{{ field.help_text }}
+ {% endif %} + {% if field.errors %} + {% for error in field.errors %} +
{{ error }}
+ {% endfor %} + {% endif %} +
+ {% endfor %} + +
+ Cancel + +
+
+
+
+ + +
+
+
Service Account Requirements
+
    +
  1. Required APIs must be enabled: +
      +
    • Cloud Resource Manager API
    • +
    • Cloud Storage API
    • +
    • Cloud Logging API
    • +
    • Compute Engine API
    • +
    +
  2. +
  3. The service account should have the following roles: +
      +
    • Viewer
    • +
    • Security Reviewer
    • +
    • Logs Viewer
    • +
    +
  4. +
  5. If you're updating the service account key: +
      +
    • Create a new key in JSON format
    • +
    • Upload the new key file
    • +
    • The old key will be replaced
    • +
    +
  6. +
+
+ These permissions provide read-only access suitable for incident response and investigation purposes. +
+
+ Keep your service account key secure and never share it with unauthorized parties. +
+ +
+
+
+
+ +{% endblock %} \ No newline at end of file diff --git a/templates/gcp/logsource_details.html b/templates/gcp/logsource_details.html new file mode 100644 index 0000000..49509fc --- /dev/null +++ b/templates/gcp/logsource_details.html @@ -0,0 +1,84 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} +
+
+ +
+
+

Log Source Details

+

{{ log_source.log_name }}

+
+ + Back to Project + +
+ + +
+
+

Basic Information

+
+
+
+
Service Name
+
{{ log_source.service_name }}
+ +
Status
+
{{ log_source.status }}
+ +
Location
+
{{ log_source.location|default:"Global" }}
+ +
Discovered
+
{{ log_source.discovered_at|date:"M d, Y H:i" }}
+
+
+
+ + +
+
+

Log Source Details

+
+
+ {% if log_source.log_details %} + {% for key, value in log_source.log_details.items %} +
+

{{ key }}

+ {% if value is iterable and value.items %} + +
+ {% for nested_key, nested_value in value.items %} +
{{ nested_key }}
+
+ {% if nested_value is iterable and nested_value.items %} + +
+ {% for sub_key, sub_value in nested_value.items %} +
{{ sub_key }}
+
{{ sub_value }}
+ {% endfor %} +
+ {% else %} + {{ nested_value }} + {% endif %} +
+ {% endfor %} +
+ {% elif value is iterable and value|length > 50 %} +
{{ value }}
+ {% else %} +

{{ value }}

+ {% endif %} +
+ {% endfor %} + {% else %} +

No additional details available for this log source.

+ {% endif %} +
+
+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/gcp/resource_details.html b/templates/gcp/resource_details.html new file mode 100644 index 0000000..3bd6021 --- /dev/null +++ b/templates/gcp/resource_details.html @@ -0,0 +1,86 @@ +{% extends "web/app/app_base.html" %} +{% load i18n %} +{% load static %} +{% block app %} +
+
+ +
+
+

Resource Details

+

{{ resource.resource_name|default:resource.resource_id }}

+
+ + Back to Project + +
+ + +
+
+

Basic Information

+
+
+
+
Resource Type
+
{{ resource.resource_type }}
+ +
Resource ID
+
+ {{ resource.resource_id }} +
+ +
Location
+
{{ resource.location|default:"Global" }}
+ +
Discovered
+
{{ resource.discovered_at|date:"M d, Y H:i" }}
+
+
+
+ + +
+
+

Resource Details

+
+
+ {% if resource.resource_details %} + {% for key, value in resource.resource_details.items %} +
+

{{ key }}

+ {% if value is iterable and value.items %} + +
+ {% for nested_key, nested_value in value.items %} +
{{ nested_key }}
+
+ {% if nested_value is iterable and nested_value.items %} + +
+ {% for sub_key, sub_value in nested_value.items %} +
{{ sub_key }}
+
{{ sub_value }}
+ {% endfor %} +
+ {% else %} + {{ nested_value }} + {% endif %} +
+ {% endfor %} +
+ {% elif value is iterable and value|length > 50 %} +
{{ value }}
+ {% else %} +

{{ value }}

+ {% endif %} +
+ {% endfor %} + {% else %} +

No additional details available for this resource.

+ {% endif %} +
+
+
+
+{% endblock %} \ No newline at end of file