23 changes: 10 additions & 13 deletions analyzer/windows/analyzer.py
@@ -1408,10 +1408,11 @@ def _handle_process2(self, data):
 
         return self._inject_process(int(pid), int(tid), int(mode))
 
-    def _handle_file_new(self, file_path):
+    def _handle_file_new(self, data):
         """Notification of a new dropped file."""
-        if os.path.exists(file_path):
-            self.analyzer.files.add_file(file_path.decode(), self.pid)
+        pid, file_path = data.split(b",", 1)
+        if os.path.exists(file_path.decode()):
+            self.analyzer.files.add_file(file_path.decode(), pid.decode())
 
     def _handle_file_cape(self, data):
         """Notification of a new dropped file."""
@@ -1432,9 +1433,9 @@ def _handle_file_cape(self, data):
     def _handle_file_del(self, data):
         """Notification of a file being removed (if it exists) - we have to
         dump it before it's being removed."""
-        file_path = data.decode()
-        if os.path.exists(file_path):
-            self.analyzer.files.delete_file(file_path, self.pid)
+        pid, file_path = data.split(b",", 1)
+        if os.path.exists(file_path.decode()):
+            self.analyzer.files.delete_file(file_path.decode(), pid.decode())
 
     def _handle_file_dump(self, file_path):
         # We extract the file path.
@@ -1492,19 +1493,15 @@ def _handle_file_move(self, data):
         if b"::" not in data:
             log.warning("Received FILE_MOVE command from monitor with an incorrect argument")
             return
-
-        old_filepath, new_filepath = data.split(b"::", 1)
-        new_filepath = new_filepath.decode()
-        self.analyzer.files.move_file(old_filepath.decode(), new_filepath, self.pid)
+        pid, paths = data.split(b",", 1)
+        old_filepath, new_filepath = paths.split(b"::", 1)
+        self.analyzer.files.move_file(old_filepath.decode(), new_filepath.decode(), pid.decode())
 
     def dispatch(self, data):
         response = "NOPE"
         if not data or b":" not in data:
             log.critical("Unknown command received from the monitor: %s", data.strip())
         else:
-            # Backwards compatibility (old syntax is, e.g., "FILE_NEW:" vs the
-            # new syntax, e.g., "1234:FILE_NEW:").
-            # if data[0].isupper():
             command, arguments = data.strip().split(b":", 1)
             # Uncomment to debug monitor commands
             # if command not in (b"DEBUG", b"INFO"):
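The handlers above change the argument layout for the monitor's file notifications: the sending PID is now prepended to the path, separated by a comma (FILE_MOVE additionally keeps the existing `::` separator between the old and new path). A minimal parsing sketch, assuming a raw command of the form FILE_NEW:<pid>,<path> (the exact wire format emitted by the updated capemon DLLs is an assumption inferred from this diff):

# Illustrative only: mirrors the splits done in dispatch() and _handle_file_new().
# The command layout below is an assumption inferred from this diff, not taken from capemon.
raw = b"FILE_NEW:1234,C:\\Users\\user\\AppData\\Local\\Temp\\dropped.bin"

command, arguments = raw.strip().split(b":", 1)  # dispatch(): command vs. arguments
pid, file_path = arguments.split(b",", 1)        # _handle_file_new(): pid vs. path

assert command == b"FILE_NEW"
assert pid.decode() == "1234"
assert file_path.decode().endswith("dropped.bin")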
Binary file modified analyzer/windows/dll/capemon.dll
Binary file modified analyzer/windows/dll/capemon_x64.dll
10 changes: 10 additions & 0 deletions changelog.md
@@ -1,3 +1,13 @@
+### [04.02.2026]
+* Network Analysis:
+    * Integrated process mapping directly into `network` processing module.
+    * Added ability to show network details (DNS, HTTP, TCP/UDP) captured from behavioral analysis in the network results.
+    * This allows recovery of network activity that might be missing from PCAP (e.g., due to capture evasion or failed interception).
+    * Centralized network utility functions into `lib/cuckoo/common/network_utils.py` for better maintainability and performance.
+    * New configuration option `process_map` under `[network]` section in `processing.conf`.
+* Web UI:
+    * Added Process Name and PID columns across all network analysis views (TCP, UDP, ICMP, DNS, HTTP, IRC, SMTP).
+
 ### [28.01.2026]
 * CAPE Agent:
     * Ported to Golang for improved stealth, performance, and zero-dependency deployment.
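The changelog entry above implies that connection records in the network results can now carry the process that generated them. A hypothetical sketch of what such an enriched TCP entry could look like; the key names pid and process_name are illustrative assumptions, not confirmed output of the new processing code:

# Hypothetical shape of a process-mapped TCP record (field names are assumptions).
tcp_entry = {
    "src": "192.168.56.101",
    "sport": 49213,
    "dst": "93.184.216.34",
    "dport": 443,
    "pid": 1234,                   # assumed key: PID recovered from behavioral analysis
    "process_name": "sample.exe",  # assumed key: shown in the new Web UI columns
}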
6 changes: 4 additions & 2 deletions conf/default/processing.conf.default
@@ -99,6 +99,10 @@ enabled = no
 [network]
 enabled = yes
 sort_pcap = no
+# Enable mapping of network events to specific processes using behavioral analysis data
+process_map = no
+# Adds network connections seen in behavior but not in PCAP. Requires process_map = yes
+merge_behavior_map = no
 # DNS whitelisting to ignore domains/IPs configured in network.py
 dnswhitelist = yes
 # additional entries
@@ -324,5 +328,3 @@ enabled = no
 # plain-text TLS streams into the task PCAP.
 enabled = no
 
-[network_process_map]
-enabled = no
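A short sketch of how a processing module might consult the two new [network] options, assuming CAPE's usual Config helper and attribute-style section access (the actual lookup performed by the network module is not part of this excerpt):

# Hypothetical usage sketch, not the actual network.py change from this PR.
from lib.cuckoo.common.config import Config

processing_conf = Config("processing")

# Both options default to "no"; CAPE's config loader normally exposes yes/no as booleans.
process_map = processing_conf.network.get("process_map", False)
merge_behavior_map = processing_conf.network.get("merge_behavior_map", False)

if process_map:
    # Map PCAP flows back to the PIDs recorded by behavioral analysis.
    pass
if process_map and merge_behavior_map:
    # Also add behavior-only connections that never showed up in the PCAP.
    pass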
275 changes: 275 additions & 0 deletions lib/cuckoo/common/network_utils.py
@@ -0,0 +1,275 @@
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.

import datetime
from contextlib import suppress
from urllib.parse import urlparse

DNS_APIS = {
"getaddrinfo",
"getaddrinfow",
"getaddrinfoex",
"getaddrinfoexw",
"gethostbyname",
"gethostbynamew",
"dnsquery_a",
"dnsquery_w",
"dnsqueryex",
"dnsquery",
}


HTTP_HINT_APIS = {
"internetcrackurla",
"internetcrackurlw",
"httpsendrequesta",
"httpsendrequestw",
"internetsendrequesta",
"internetsendrequestw",
"internetconnecta",
"internetconnectw",
"winhttpopenrequest",
"winhttpsendrequest",
"winhttpconnect",
"winhttpopen",
"internetopenurla",
"internetopenurlw",
"httpopenrequesta",
"httpopenrequestw",
"isvalidurl",
}


TLS_HINT_APIS = {
"sslencryptpacket",
"ssldecryptpacket",
"initializesecuritycontexta",
"initializesecuritycontextw",
"initializesecuritycontextexa",
"initializesecuritycontextexw",
"acceptsecuritycontext",
}


def _norm_domain(d):
if not d or not isinstance(d, str):
return None
d = d.strip().strip(".").lower()
return d or None


def _parse_behavior_ts(ts_str):
"""
Parse behavior timestamp like: '2026-01-22 23:46:58,199' -> epoch float
Returns None if parsing fails.
"""
if not ts_str or not isinstance(ts_str, str):
return None
try:
return datetime.datetime.strptime(ts_str, "%Y-%m-%d %H:%M:%S,%f").timestamp()
except ValueError:
return None


def _get_call_args_dict(call):
"""Convert arguments list to a dictionary for O(1) access."""
return {a["name"]: a["value"] for a in call.get("arguments", []) if "name" in a}


def _extract_domain_from_call(call, args_map):
# Check named arguments first
for name in (
"hostname",
"host",
"node",
"nodename",
"name",
"domain",
"szName",
"pszName",
"lpName",
"query",
"queryname",
"dns_name",
"QueryName",
"lpstrName",
"pName",
):
v = args_map.get(name)
if isinstance(v, str) and v.strip():
return v

# Heuristic scan of all string arguments
for v in args_map.values():
if isinstance(v, str):
s = v.strip()
if "." in s and " " not in s and s.count(".") <= 10:
return s

return None


def _get_arg_any(args_map, *names):
"""Return the first matching argument value for any of the provided names."""
for n in names:
if n in args_map:
return args_map[n]
return None


def _norm_ip(ip):
if ip is None:
return None
if not isinstance(ip, str):
ip = str(ip)
ip = ip.strip()
return ip or None


def _looks_like_http(buf):
if not buf or not isinstance(buf, str):
return False

first = buf.splitlines()[0].strip() if buf else ""
if not first:
return False

u = first.upper()
if u.startswith("HTTP/1.") or u.startswith("HTTP/2"):
return True

methods = ("GET ", "POST ", "HEAD ", "PUT ", "DELETE ", "OPTIONS ", "PATCH ", "TRACE ")
if any(u.startswith(m) for m in methods) and " HTTP/1." in u:
return True

if u.startswith("CONNECT ") and " HTTP/1." in u:
return True

return False


def _http_host_from_buf(buf):
if not buf or not isinstance(buf, str):
return None

lines = buf.splitlines()
if not lines:
return None

for line in lines[1:50]:
if line.lower().startswith("host:"):
try:
return line.split(":", 1)[1].strip()
except IndexError:
continue

with suppress(Exception):
first = lines[0].strip()
parts = first.split()
if len(parts) >= 2:
target = parts[1].strip()
url = _extract_first_url(target)
if url:
host = _host_from_url(url)
if host:
return host

with suppress(Exception):
first = lines[0].strip()
parts = first.split()
if len(parts) >= 2 and parts[0].upper() == "CONNECT":
return parts[1].strip()

return None


def _safe_int(x):
with suppress(Exception):
return int(x)
return None


def _host_from_url(url):
if not url or not isinstance(url, str):
return None

with suppress(Exception):
u = urlparse(url)
return u.hostname

return None


def _extract_first_url(text):
if not text or not isinstance(text, str):
return None
s = text.strip()
for scheme in ("http://", "https://"):
idx = s.lower().find(scheme)
if idx != -1:
return s[idx:].split()[0].strip('"\',')
return None


def _add_http_host(http_host_map, host, pinfo, sock=None):
"""
Store host keys in a stable way.
Adds:
- normalized host
- if host is host:port and port parses, also normalized host-only
"""
hk = _norm_domain(host)
if not hk:
return

entry = dict(pinfo)
if sock is not None:
entry["socket"] = sock

http_host_map[hk].append(entry)

if ":" in hk:
h_only, p = hk.rsplit(":", 1)
if _safe_int(p) is not None and h_only:
http_host_map[h_only].append(entry)


def _extract_tls_server_name(call, args_map):
"""
Best-effort server name extraction for TLS/SChannel/SSPI.
"""
for name in (
"sni",
"SNI",
"ServerName",
"servername",
"server_name",
"TargetName",
"targetname",
"Host",
"host",
"hostname",
"Url",
"URL",
"url",
):
v = args_map.get(name)
if isinstance(v, str) and v.strip():
s = v.strip()
u = _extract_first_url(s)
if u:
return _host_from_url(u) or s
if "." in s and " " not in s and len(s) < 260:
return s

for v in args_map.values():
if isinstance(v, str):
s = v.strip()
if "." in s and " " not in s and len(s) < 260:
u = _extract_first_url(s)
if u:
return _host_from_url(u) or s
return s

return None
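The helpers above are self-contained, so they can be exercised directly. A small usage sketch; the sample HTTP buffer and the process-info dict are made up for illustration:

# Illustrative usage of the helpers defined above (sample data is invented).
from collections import defaultdict

from lib.cuckoo.common.network_utils import (
    _add_http_host,
    _http_host_from_buf,
    _looks_like_http,
    _parse_behavior_ts,
)

buf = "GET /index.html HTTP/1.1\r\nHost: example.com\r\nUser-Agent: test\r\n\r\n"

print(_looks_like_http(buf))       # True: request line plus " HTTP/1." marker
print(_http_host_from_buf(buf))    # "example.com", taken from the Host: header

# Behavior log timestamps use a comma before the milliseconds.
print(_parse_behavior_ts("2026-01-22 23:46:58,199"))  # epoch float, or None on bad input

# _add_http_host() expects a defaultdict(list); "host:port" keys are also
# indexed under the bare host.
http_host_map = defaultdict(list)
pinfo = {"pid": 1234, "process_name": "sample.exe"}  # made-up process info
_add_http_host(http_host_map, "Example.COM:8080", pinfo)
print(sorted(http_host_map))       # ['example.com', 'example.com:8080']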