Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions conf/default/reporting.conf.default
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ enabled = no
[browserext]
enabled = no

# Google Cloud Storage - Store a copy of the analysis folder in GCS
# Google Cloud Storage
[gcs]
enabled = no
# The name of your Google Cloud Storage bucket where files will be uploaded.
Expand All @@ -231,6 +231,8 @@ exclude_dirs = logs, shots
# Good examples are large report formats you don't need in GCS.
exclude_files =

# Authentication method used for Google Cloud Storage.
# Can be vm or json
auth_by = vm
# Only used if auth_by = json. The absolute path to your Google Cloud service account JSON key file.
# This file is required for authentication.
credentials_path = data/gcp-credentials.json
2 changes: 2 additions & 0 deletions docs/book/src/installation/host/gcs.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ Before installing the module, you need to prepare your Google Cloud environment.
* Grant it the **Storage Object Creator** or **Storage Object Admin** role. This permission is necessary to write files to the bucket.

3. **Download JSON Key:**
* This step is optional if you use ``auth_by=vm``
* After creating the service account, go to its **Keys** tab.
* Click **Add Key** > **Create new key**.
* Select ``JSON`` as the key type and click **Create**. A JSON file will be downloaded.
Expand All @@ -40,6 +41,7 @@ Module Installation and Configuration
* Edit ``/opt/CAPEv2/conf/reporting.conf``.
* ``[gcs]`` section, enable ``enabled=yes``.
* Set ``bucket_name`` to the name of your GCS bucket.
* Set ``auth_by`` to ``vm`` if using the VM's default service account, or ``json`` if using a credentials file.
* Set ``credentials_path`` to the **absolute path** where you saved your service account JSON key file.

3. **Restart CAPE-processor:**
Expand Down
26 changes: 15 additions & 11 deletions modules/reporting/gcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,16 +45,22 @@ def run(self, results):
bucket_name = self.options.get("bucket_name")
if not bucket_name:
raise CuckooReportError("GCS bucket_name is not configured in reporting.conf -> gcs")
auth_by = self.options.get("auth_by")
if auth_by == "vm":
storage_client = storage.Client()
else:
credentials_path_str = self.options.get("credentials_path")
if not credentials_path_str:
raise CuckooReportError("GCS credentials_path is not configured in reporting.conf -> gcs")

credentials_path = os.path.join(CUCKOO_ROOT, credentials_path_str)
if not os.path.isfile(credentials_path):
raise CuckooReportError(
"GCS credentials_path '%s' is invalid or file does not exist in reporting.conf -> gcs", credentials_path
)

credentials_path_str = self.options.get("credentials_path")
if not credentials_path_str:
raise CuckooReportError("GCS credentials_path is not configured in reporting.conf -> gcs")

credentials_path = os.path.join(CUCKOO_ROOT, credentials_path_str)
if not os.path.isfile(credentials_path):
raise CuckooReportError(
"GCS credentials_path '%s' is invalid or file does not exist in reporting.conf -> gcs", credentials_path
)
credentials = service_account.Credentials.from_service_account_file(credentials_path)
storage_client = storage.Client(credentials=credentials)

# Read the exclusion lists, defaulting to empty strings
exclude_dirs_str = self.options.get("exclude_dirs", "")
Expand All @@ -73,8 +79,6 @@ def run(self, results):
try:
# --- Authentication ---
log.debug("Authenticating with Google Cloud Storage...")
credentials = service_account.Credentials.from_service_account_file(credentials_path)
storage_client = storage.Client(credentials=credentials)
bucket = storage_client.bucket(bucket_name)

# Check if the bucket exists and is accessible
Expand Down