From b90709d3fab89ab4cfaabd55f35db2115f902348 Mon Sep 17 00:00:00 2001 From: Bop <37331768+lhotlan64@users.noreply.github.com> Date: Sat, 19 Oct 2024 20:15:09 +0200 Subject: [PATCH 1/5] Rewrite netstat and arp/net-neighbor collection method --- acquire/acquire.py | 54 ++- acquire/dynamic/windows/arp.py | 245 ++++++++++++ acquire/dynamic/windows/iphlpapi.py | 565 ++++++++++++++++++++++++++++ acquire/dynamic/windows/netstat.py | 234 ++++++++++++ 4 files changed, 1084 insertions(+), 14 deletions(-) create mode 100644 acquire/dynamic/windows/arp.py create mode 100644 acquire/dynamic/windows/iphlpapi.py create mode 100644 acquire/dynamic/windows/netstat.py diff --git a/acquire/acquire.py b/acquire/acquire.py index 6cd607fe..2ef79f82 100644 --- a/acquire/acquire.py +++ b/acquire/acquire.py @@ -30,6 +30,19 @@ from acquire.collector import Collector, get_full_formatted_report, get_report_summary from acquire.dynamic.windows.named_objects import NamedObjectType +from acquire.dynamic.windows.arp import ( + NetAdapter, + NetNeighbor, + get_windows_network_adapters, + get_windows_arp_cache, + get_windows_net_neighbors, + format_net_neighbors_list +) +from acquire.dynamic.windows.netstat import ( + NetConnection, + get_active_connections, + format_net_connections_list +) from acquire.esxi import esxi_memory_context_manager from acquire.gui import GUI from acquire.hashes import ( @@ -377,12 +390,20 @@ def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Ite @register_module("--netstat") @local_module class Netstat(Module): - DESC = "netstat output" - SPEC = [ - ("command", (["powershell.exe", "netstat", "-a", "-n", "-o"], "netstat")), - ] + DESC = "Windows network connections" EXEC_ORDER = ExecutionOrder.BOTTOM + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + net_connections: list[NetConnection] = get_active_connections() + output = format_net_connections_list(net_connections) + + output_base = 
fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + full_output_path = fsutil.join(output_base, "netstat") + + collector.output.write_bytes(full_output_path, output.encode()) + collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"]) + @register_module("--win-processes") @local_module @@ -417,18 +438,23 @@ class WinArpCache(Module): EXEC_ORDER = ExecutionOrder.BOTTOM @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + network_adapters: list[NetAdapter] = get_windows_network_adapters() + + neighbors: list[NetNeighbor] = [] + if float(target.ntversion) < 6.2: - commands = [ - # < Windows 10 - ("command", (["arp", "-av"], "win7-arp-cache")), - ] + neighbors = get_windows_arp_cache(network_adapters) else: - commands = [ - # Windows 10+ (PowerShell) - ("command", (["PowerShell", "Get-NetNeighbor"], "win10-arp-cache")), - ] - return commands + neighbors = get_windows_net_neighbors(network_adapters) + + output = format_net_neighbors_list(neighbors) + + output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + full_output_path = fsutil.join(output_base, "arp-cache") + + collector.output.write_bytes(full_output_path, output.encode()) + collector.report.add_command_collected(cls.__name__, ["arp-cache"]) @register_module("--win-rdp-sessions") diff --git a/acquire/dynamic/windows/arp.py b/acquire/dynamic/windows/arp.py new file mode 100644 index 00000000..b734c05a --- /dev/null +++ b/acquire/dynamic/windows/arp.py @@ -0,0 +1,245 @@ +from __future__ import annotations + +import ctypes +from json import dumps +from socket import inet_ntop + +from acquire.dynamic.windows.iphlpapi import ( + ADDRESS_FAMILY, + ERROR_NO_DATA, + ERROR_NOT_SUPPORTED, + IF_OPER_STATUS, + IF_TYPE, 
+ IP_ADAPTER_ADDRESSES, + LPVOID, + MIB_IPNET_ROW2, + MIB_IPNET_TABLE2, + MIB_IPNETROW, + MIB_IPNETTABLE, + NO_ERROR, + ULONG, + FreeMibTable, + GetAdaptersAddresses, + GetIpNetTable, + GetIpNetTable2, +) + + +def format_physical_address(data: bytes, length: int) -> str | None: + if length > 0: + return "-".join(f"{b:02X}" for b in data[:length]) + return None + + +class NetAdapter: + def __init__( + self, + index: int, + name: str, + description: str, + friendly_name: str, + physical_address: str | None, + mtu: int, + type: IF_TYPE, + status: IF_OPER_STATUS, + ): + self.index = index + self.name = name + self.description = description + self.friendly_name = friendly_name + self.physical_address = physical_address + self.mtu = mtu + self.type = type + self.operation_status = status + + @staticmethod + def from_adapter_addresses(addresses: IP_ADAPTER_ADDRESSES) -> NetAdapter: + index = addresses.Index + adapter_name = addresses.AdapterName.decode() + adapter_desc = addresses.Description + adapter_friendly = addresses.FriendlyName + physical_addr = format_physical_address(addresses.PhysicalAddress, addresses.PhysicalAddressLength) + mtu = addresses.Mtu + type = IF_TYPE(addresses.IfType) + status = IF_OPER_STATUS(addresses.OperStatus) + + return NetAdapter( + index=index, + name=adapter_name, + description=adapter_desc, + friendly_name=adapter_friendly, + physical_address=physical_addr, + mtu=mtu, + type=type, + status=status, + ) + + @staticmethod + def header_fields() -> list[str]: + return ["Index", "Adapter Name", "Description", "Friendly Name", + "MAC Address", "MTU", "Type", "Operation Status"] + + def as_dict(self, indent=0) -> dict: + return { + 'index': self.index, + 'name': self.name, + 'description': self.description, + 'friendly_name': self.friendly_name, + 'mac': self.physical_address, + 'mtu': self.mtu, + 'type': self.type.name, + 'status': self.operation_status.name + } + + def __str__(self) -> str: + return ( + f"NetAdapter(index={self.index}, 
name={self.name}, desc={self.description}" + f", friendly={self.friendly_name}, mac={self.physical_address}, mtu={self.mtu}, type={self.type}" + f", status={self.operation_status.name})" + ) + + +class NetNeighbor: + def __init__(self, family: ADDRESS_FAMILY, address: str, mac: str | None, adapter: NetAdapter | None): + self.family: ADDRESS_FAMILY = family + self.address: str = address + self.mac: str | None = mac + self.adapter: NetAdapter | None = adapter + + def as_dict(self) -> dict: + return { + 'family': self.family.name, + 'address': self.address, + 'mac': self.mac if self.mac else '', + 'adapter': self.adapter.as_dict() + } + + def __str__(self) -> str: + return f"NetNeighbor(family={self.family.name}, address={self.address}, mac={self.mac}, adapter={self.adapter})" + + +def get_windows_network_adapters() -> list[NetAdapter]: + adapter_buffer_size = ULONG(0) + GetAdaptersAddresses(ADDRESS_FAMILY.AF_UNSPEC, 0, LPVOID(0), LPVOID(0), ctypes.byref(adapter_buffer_size)) + + if adapter_buffer_size == 0: + return [] + + buffer = ctypes.create_string_buffer(adapter_buffer_size.value) + result = GetAdaptersAddresses(ADDRESS_FAMILY.AF_UNSPEC, 0, LPVOID(0), buffer, ctypes.byref(adapter_buffer_size)) + if result != NO_ERROR: + return [] + + adapters = ctypes.cast(buffer, ctypes.POINTER(IP_ADAPTER_ADDRESSES)) + adapter = adapters.contents + + network_adapters = [] + + while True: + network_adapters.append(NetAdapter.from_adapter_addresses(adapter)) + + if not adapter.Next: + break + + adapter = ctypes.cast(adapter.Next, ctypes.POINTER(IP_ADAPTER_ADDRESSES)).contents + + return network_adapters + + +def get_adapter_by_index(adapters: list[NetAdapter], index: int) -> NetAdapter | None: + for adapter in adapters: + if adapter.index == index: + return adapter + return None + + +def get_windows_arp_cache(adapters: list[NetAdapter]) -> list[NetNeighbor]: + table_buffer_len = ULONG(0) + status = GetIpNetTable(LPVOID(0), ctypes.byref(table_buffer_len), True) + + if status in 
[ERROR_NO_DATA, ERROR_NOT_SUPPORTED]: + return [] + + buffer = ctypes.create_string_buffer(table_buffer_len.value) + result = GetIpNetTable(buffer, ctypes.byref(table_buffer_len), True) + + if result != NO_ERROR: + return [] + + table = ctypes.cast(buffer, ctypes.POINTER(MIB_IPNETTABLE)).contents + rows = ctypes.cast(table.table, ctypes.POINTER(MIB_IPNETROW * table.dwNumEntries)).contents + + neighbors = [] + + for row in rows: + adapter = get_adapter_by_index(adapters, row.dwIndex) + + entry = NetNeighbor( + family=ADDRESS_FAMILY.AF_INET, + address=inet_ntop(ADDRESS_FAMILY.AF_INET, row.dwAddr), + mac=format_physical_address(row.bPhysAddr, row.dwPhysAddrLen), + adapter=adapter, + ) + + neighbors.append(entry) + + return neighbors + + +def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: + table_pointer = ctypes.POINTER(MIB_IPNET_TABLE2)() + result = GetIpNetTable2(ADDRESS_FAMILY.AF_UNSPEC, ctypes.byref(table_pointer)) + + if result != NO_ERROR: + return [] + + table = table_pointer.contents + rows = ctypes.cast(table.Table, ctypes.POINTER(MIB_IPNET_ROW2 * table.NumEntries)).contents + + neighbors = [] + + for row in rows: + if row.Address.si_family == ADDRESS_FAMILY.AF_INET: + ipv4 = row.Address.Ipv4 + address = inet_ntop(ADDRESS_FAMILY.AF_INET, ipv4.sin_addr) + elif row.Address.si_family == ADDRESS_FAMILY.AF_INET6: + ipv6 = row.Address.Ipv6 + address = f"[{inet_ntop(ADDRESS_FAMILY.AF_INET6, ipv6.sin6_addr)}]" + else: + # We should not end up here, but let's gracefully continue in hope there is more valid data to parse. 
+ continue + + mac = format_physical_address(row.PhysicalAddress, row.PhysicalAddressLength) + adapter = get_adapter_by_index(adapters, row.InterfaceIndex) + neighbor = NetNeighbor(family=ADDRESS_FAMILY(row.Address.si_family), address=address, mac=mac, adapter=adapter) + neighbors.append(neighbor) + + FreeMibTable(table_pointer) + + return neighbors + + +def format_net_neighbors_csv(net_neighbors: list[NetNeighbor]) -> str: + def formatter(neighbor: NetNeighbor) -> str: + return f",".join([str(neighbor.adapter.index), neighbor.address, neighbor.mac if neighbor.mac else ""]) + + header = ",".join(["interface_index", "ip_address", "mac"]) + rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) + + return f"{header}\n{rows}" + + +def format_net_neighbors_json(net_neighbors: list[NetNeighbor], indent=0) -> str: + return dumps(net_neighbors, default=lambda neighbor: neighbor.as_dict(), indent=indent if indent > 0 else None) + + +def format_net_neighbors_list(net_neighbors: list[NetNeighbor]) -> str: + def formatter(neighbor: NetNeighbor) -> str: + mac = neighbor.mac if neighbor.mac else "" + return f"{neighbor.adapter.index:<10}{neighbor.address:<60}{mac:<20}" + + header = f"{'ifIndex':<10}{'IP Address':<60}{'MAC Address':<20}" + header += "\n" + ('=' * len(header)) + rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) + + return f"{header}\n{rows}" diff --git a/acquire/dynamic/windows/iphlpapi.py b/acquire/dynamic/windows/iphlpapi.py new file mode 100644 index 00000000..b37f5afa --- /dev/null +++ b/acquire/dynamic/windows/iphlpapi.py @@ -0,0 +1,565 @@ +import ctypes +from ctypes.wintypes import ( + BOOL, + BYTE, + DWORD, + LPVOID, + LPWSTR, + PDWORD, + SHORT, + ULONG, + USHORT, +) +from enum import IntEnum +from typing import ClassVar + +IF_MAX_PHYS_ADDRESS_LENGTH = 32 +MAX_ADAPTER_ADDRESS_LENGTH = 8 +MAXLEN_PHYSADDR = 8 +MAX_DHCPV6_DUID_LENGTH = 130 + +NO_ERROR = 0 +ERROR_NOT_SUPPORTED = 50 +ERROR_INSUFFICIENT_BUFFER = 122 +ERROR_NO_DATA 
= 232 + +BITNESS = [32, 64][ctypes.sizeof(LPVOID) == 8] + + +class TCP_TABLE_CLASS(IntEnum): + BASIC_LISTENER = 0 + BASIC_CONNECTIONS = 1 + BASIC_ALL = 2 + OWNER_PID_LISTENER = 3 + OWNER_PID_CONNECTIONS = 4 + OWNER_PID_ALL = 5 + OWNER_MODULE_LISTENER = 6 + OWNER_MODULE_CONNECTIONS = 7 + OWNER_MODULE_ALL = 8 + + +class UDP_TABLE_CLASS(IntEnum): + BASIC = 0 + OWNER_PID = 1 + OWNER_MODUL = 2 + + +class MIB_IPNET_TYPE(IntEnum): + OTHER = 1 + INVALID = 2 + DYNAMIC = 3 + STATIC = 4 + + +class IF_TYPE(IntEnum): + OTHER = 1 + REGULAR_1822 = 2 + HDH_1822 = 3 + DDN_X25 = 4 + RFC877_X25 = 5 + ETHERNET_CSMACD = 6 + IS088023_CSMACD = 7 + ISO88024_TOKENBUS = 8 + ISO88025_TOKENRING = 9 + ISO88026_MAN = 10 + STARLAN = 11 + PROTEON_10MBIT = 12 + PROTEON_80MBIT = 13 + HYPERCHANNEL = 14 + FDDI = 15 + LAP_B = 16 + SDLC = 17 + DS1 = 18 + E1 = 19 + BASIC_ISDN = 20 + PRIMARY_ISDN = 21 + PROP_POINT2POINT_SERIAL = 22 + PPP = 23 + SOFTWARE_LOOPBACK = 24 + EON = 25 + ETHERNET_3MBIT = 26 + NSIP = 27 + SLIP = 28 + ULTRA = 29 + DS3 = 30 + SIP = 31 + FRAMERELAY = 32 + RS232 = 33 + PARA = 34 + ARCNET = 35 + ARCNET_PLUS = 36 + ATM = 37 + MIO_X25 = 38 + SONET = 39 + X25_PLE = 40 + ISO88022_LLC = 41 + LOCALTALK = 42 + SMDS_DXI = 43 + FRAMERELAY_SERVICE = 44 + V35 = 45 + HSSI = 46 + HIPPI = 47 + MODEM = 48 + AAL5 = 49 + SONET_PATH = 50 + SONET_VT = 51 + SMDS_ICIP = 52 + PROP_VIRTUAL = 53 + PROP_MULTIPLEXOR = 54 + IEEE80212 = 55 + FIBRECHANNEL = 56 + HIPPIINTERFACE = 57 + FRAMERELAY_INTERCONNECT = 58 + AFLANE_8023 = 59 + AFLANE_8025 = 60 + CCTEMUL = 61 + FASTETHER = 62 + ISDN = 63 + V11 = 64 + V36 = 65 + G703_64K = 66 + G703_2MB = 67 + QLLC = 68 + FASTETHER_FX = 69 + CHANNEL = 70 + IEEE80211 = 71 + IBM370PARCHAN = 72 + ESCON = 73 + DLSW = 74 + ISDN_S = 75 + ISDN_U = 76 + LAP_D = 77 + IPSWITCH = 78 + RSRB = 79 + ATM_LOGICAL = 80 + DS0 = 81 + DS0_BUNDLE = 82 + BSC = 83 + ASYNC = 84 + CNR = 85 + ISO88025R_DTR = 86 + EPLRS = 87 + ARAP = 88 + PROP_CNLS = 89 + HOSTPAD = 90 + TERMPAD = 91 + FRAMERELAY_MPI = 
92 + X213 = 93 + ADSL = 94 + RADSL = 95 + SDSL = 96 + VDSL = 97 + ISO88025_CRFPRINT = 98 + MYRINET = 99 + VOICE_EM = 100 + VOICE_FXO = 101 + VOICE_FXS = 102 + VOICE_ENCAP = 103 + VOICE_OVERIP = 104 + ATM_DXI = 105 + ATM_FUNI = 106 + ATM_IMA = 107 + PPPMULTILINKBUNDLE = 108 + IPOVER_CDLC = 109 + IPOVER_CLAW = 110 + STACKTOSTACK = 111 + VIRTUALIPADDRESS = 112 + MPC = 113 + IPOVER_ATM = 114 + ISO88025_FIBER = 115 + TDLC = 116 + GIGABITETHERNET = 117 + HDLC = 118 + LAP_F = 119 + V37 = 120 + X25_MLP = 121 + X25_HUNTGROUP = 122 + TRANSPHDLC = 123 + INTERLEAVE = 124 + FAST = 125 + IP = 126 + DOCSCABLE_MACLAYER = 127 + DOCSCABLE_DOWNSTREAM = 128 + DOCSCABLE_UPSTREAM = 129 + A12MPPSWITCH = 130 + TUNNEL = 131 + COFFEE = 132 + CES = 133 + ATM_SUBINTERFACE = 134 + L2_VLAN = 135 + L3_IPVLAN = 136 + L3_IPXVLAN = 137 + DIGITALPOWERLINE = 138 + MEDIAMAILOVERIP = 139 + DTM = 140 + DCN = 141 + IPFORWARD = 142 + MSDSL = 143 + IEEE1394 = 144 + IF_GSN = 145 + DVBRCC_MACLAYER = 146 + DVBRCC_DOWNSTREAM = 147 + DVBRCC_UPSTREAM = 148 + ATM_VIRTUAL = 149 + MPLS_TUNNEL = 150 + SRP = 151 + VOICEOVERATM = 152 + VOICEOVERFRAMERELAY = 153 + IDSL = 154 + COMPOSITELINK = 155 + SS7_SIGLINK = 156 + PROP_WIRELESS_P2P = 157 + FR_FORWARD = 158 + RFC1483 = 159 + USB = 160 + IEEE8023AD_LAG = 161 + BGP_POLICY_ACCOUNTING = 162 + FRF16_MFR_BUNDLE = 163 + H323_GATEKEEPER = 164 + H323_PROXY = 165 + MPLS = 166 + MF_SIGLINK = 167 + HDSL2 = 168 + SHDSL = 169 + DS1_FDL = 170 + POS = 171 + DVB_ASI_IN = 172 + DVB_ASI_OUT = 173 + PLC = 174 + NFAS = 175 + TR008 = 176 + GR303_RDT = 177 + GR303_IDT = 178 + ISUP = 179 + PROP_DOCS_WIRELESS_MACLAYER = 180 + PROP_DOCS_WIRELESS_DOWNSTREAM = 181 + PROP_DOCS_WIRELESS_UPSTREAM = 182 + HIPERLAN2 = 183 + PROP_BWA_P2MP = 184 + SONET_OVERHEAD_CHANNEL = 185 + DIGITAL_WRAPPER_OVERHEAD_CHANNEL = 186 + AAL2 = 187 + RADIO_MAC = 188 + ATM_RADIO = 189 + IMT = 190 + MVL = 191 + REACH_DSL = 192 + FR_DLCI_ENDPT = 193 + ATM_VCI_ENDPT = 194 + OPTICAL_CHANNEL = 195 + OPTICAL_TRANSPORT = 196 + 
IEEE80216_WMAN = 237 + WWANPP = 243 + WWANPP2 = 244 + IEEE802154 = 259 + + +class ADDRESS_FAMILY(IntEnum): + AF_UNSPEC = 0 + AF_INET = 2 + AF_INET6 = 23 + + +class NL_NEIGHBOR_STATE(IntEnum): + UNREACHABLE = 0 + INCOMPLETE = 1 + PROBE = 2 + DELAY = 3 + STALE = 4 + REACHABLE = 5 + PERMANENT = 6 + MAXIMUM = 7 + + +class IF_OPER_STATUS(IntEnum): + UP = 1 + DOWN = 2 + TESTING = 3 + UNKNOWN = 4 + DORMANT = 5 + NOTPRESENT = 6 + LOWERLAYERDOWN = 7 + + +class NET_IF_CONNECTION_TYPE(IntEnum): + DEDICATED = 1 + PASSIVE = 2 + DEMAND = 3 + MAXIMUM = 4 + + +class TUNNEL_TYPE(IntEnum): + TUNNEL_TYPE_NONE = 0 + TUNNEL_TYPE_OTHER = 1 + TUNNEL_TYPE_DIRECT = 2 + TUNNEL_TYPE_6TO4 = 11 + TUNNEL_TYPE_ISATAP = 13 + TUNNEL_TYPE_TEREDO = 14 + TUNNEL_TYPE_IPHTTPS = 15 + + +class TCP_CONNECTION_OFFLOAD_STATE(IntEnum): + INHOST = 0 + OFFLOADING = 1 + OFFLOADED = 2 + UPLOADING = 3 + MAX = 4 + + +class MIB_TCP_STATE(IntEnum): + CLOSED = 1 + LISTENING = 2 + SYN_SENT = 3 + SYN_RCVD = 4 + ESTABLISHED = 5 + FIN_WAIT1 = 6 + FIN_WAIT2 = 7 + CLOSE_WAIT = 8 + CLOSING = 9 + LAST_ACK = 10 + TIME_WAIT = 11 + DELETE_TCB = 12 + RESERVED = 100 + + +class CONNECTION_PROTOCOL(IntEnum): + TCP4 = 1 + UDP4 = 2 + TCP6 = 3 + UDP6 = 4 + + +class IN_ADDR(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("S_addr", ULONG), + ] + + +class IN6_ADDR(ctypes.Union): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("Byte", BYTE * 16), + ("Word", USHORT * 8), + ] + + +class SOCKADDR_IN(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("sin_family", SHORT), + ("sin_port", USHORT), + ("sin_addr", IN_ADDR), + ("sin_zero", BYTE * 8), + ] + + +class SOCKADDR_IN6(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("sin6_family", SHORT), + ("sin6_port", USHORT), + ("sin6_flowinfo", ULONG), + ("sin6_addr", IN6_ADDR), + ("sin6_scope_id", ULONG), + ] + + +class SOCKADDR_INET(ctypes.Union): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("Ipv4", SOCKADDR_IN), + 
("Ipv6", SOCKADDR_IN6), + ("si_family", USHORT), + ] + + +class MIB_IPNETROW(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwIndex", DWORD), + ("dwPhysAddrLen", DWORD), + ("bPhysAddr", ctypes.c_ubyte * MAXLEN_PHYSADDR), + ("dwAddr", ctypes.c_ubyte * 4), + ("dwType", DWORD), + ] + + +class MIB_IPNET_ROW2(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("Address", SOCKADDR_INET), + ("InterfaceIndex", ULONG), + # Padding when executing under 32-bit Python environment here. + ("InterfaceLuid", LPVOID), + ("PhysicalAddress", ctypes.c_ubyte * IF_MAX_PHYS_ADDRESS_LENGTH), + ("PhysicalAddressLength", ULONG), + ("State", DWORD), + ("Flags", BYTE), + ("ReachabilityTime", ULONG), + ] + + # 32-bit Python does not correctly align the stucture, which results + # in no padding being added after the interface index. This causes + # the size of the object to be 84 instead of the correct 88 bytes. + # To correct for this, we add an extra four bytes of padding after the + # `InterfaceIndex` member. 
+ if BITNESS == 32: + _fields_.insert(2, ('Padding', DWORD)) + + +class MIB_IPNETTABLE(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwNumEntries", DWORD), + ("table", MIB_IPNETROW * 1) + ] + + +class MIB_IPNET_TABLE2(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("NumEntries", ULONG), + ("Padding", ULONG), # Padding to fix alignment + ("Table", MIB_IPNET_ROW2 * 1), + ] + + +class IP_ADAPTER_ADDRESSES(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("Length", ULONG), + ("Index", ULONG), + ("Next", LPVOID), + ("AdapterName", ctypes.c_char_p), + ("FirstUnicastAddress", LPVOID), + ("FirstAnycastAddress", LPVOID), + ("FirstMulticastAddress", LPVOID), + ("FirstDnsServerAddress", LPVOID), + ("DnsSuffix", LPWSTR), + ("Description", LPWSTR), + ("FriendlyName", LPWSTR), + ("PhysicalAddress", ctypes.c_ubyte * MAX_ADAPTER_ADDRESS_LENGTH), + ("PhysicalAddressLength", ULONG), + ("Flags", ULONG), + ("Mtu", ULONG), + ("IfType", ULONG), + ("OperStatus", DWORD), + ("Ipv6IfIndex", ULONG), + ("ZoneIndices", ULONG * 16), + ("FirstPrefix", LPVOID), + ("TransmitLinkSpeed", ctypes.c_ulonglong), + ("ReceiveLinkSpeed", ctypes.c_ulonglong), + ("FirstWinsServerAddress", LPVOID), + ("FirstGatewayAddress", LPVOID), + ("Ipv4Metric", ULONG), + ("Ipv6Metric", ULONG), + ("Luid", LPVOID), + ("Dhcpv4Server", BYTE * 16), + ("CompartmentId", DWORD), + ("Padding", DWORD), # Padding to fix alignment + ("NetworkGuid", BYTE * 16), + ("ConnectionType", DWORD), + ("TunnelType", DWORD), + ("Dhcpv6Server", BYTE * 16), + ("Dhcpv6ClientDuid", BYTE * MAX_DHCPV6_DUID_LENGTH), + ("Dhcpv6ClientDuidLength", ULONG), + ("Dhcpv6Iaid", ULONG), + ("FirstDnsSuffix", LPVOID), + ] + + +class MIB_TCPROW_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwState", DWORD), + ("dwLocalAddr", DWORD), + ("dwLocalPort", DWORD), + ("dwRemoteAddr", DWORD), + ("dwRemotePort", DWORD), + ("dwOwningPid", DWORD), + ] + + +class 
MIB_TCP6ROW_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("ucLocalAddr", ctypes.c_ubyte * 16), + ("dwLocalScopeId", DWORD), + ("dwLocalPort", DWORD), + ("ucRemoteAddr", ctypes.c_ubyte * 16), + ("dwRemoteScopeId", DWORD), + ("dwRemotePort", DWORD), + ("dwState", DWORD), + ("dwOwningPid", DWORD), + ] + + +class MIB_UDPROW_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwLocalAddr", DWORD), + ("dwLocalPort", DWORD), + ("dwOwningPid", DWORD), + ] + + +class MIB_UDP6ROW_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("ucLocalAddr", ctypes.c_ubyte * 16), + ("dwLocalScopeId", DWORD), + ("dwLocalPort", DWORD), + ("dwOwningPid", DWORD), + ] + + +class MIB_TCPTABLE_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwNumEntries", DWORD), + ("table", MIB_TCPROW_OWNER_PID * 1), + ] + + +class MIB_TCP6TABLE_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwNumEntries", DWORD), + ("table", MIB_TCP6ROW_OWNER_PID * 1), + ] + + +class MIB_UDPTABLE_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwNumEntries", DWORD), + ("table", MIB_UDPROW_OWNER_PID * 1), + ] + + +class MIB_UDP6TABLE_OWNER_PID(ctypes.Structure): + _fields_: ClassVar[list[tuple[str, type]]] = [ + ("dwNumEntries", DWORD), + ("table", MIB_UDP6ROW_OWNER_PID * 1), + ] + + +PULONG = ctypes.POINTER(ULONG) +PMIB_IPNETTABLE = ctypes.POINTER(MIB_IPNETTABLE) +PMIB_IPNET_TABLE2 = ctypes.POINTER(MIB_IPNET_TABLE2) +PMIB_TCPTABLE_OWNER_PID = ctypes.POINTER(MIB_TCPTABLE_OWNER_PID) +PMIB_TCP6TABLE_OWNER_PID = ctypes.POINTER(MIB_TCP6TABLE_OWNER_PID) +PMIB_UDPTABLE_OWNER_PID = ctypes.POINTER(MIB_UDPTABLE_OWNER_PID) +PMIB_UDP6TABLE_OWNER_PID = ctypes.POINTER(MIB_UDP6TABLE_OWNER_PID) + +iphlpapi = ctypes.WinDLL("Iphlpapi.dll") + +# arp calls +GetIpNetTable = iphlpapi.GetIpNetTable +GetIpNetTable.argtypes = [LPVOID, PULONG, BOOL] 
+GetIpNetTable.restype = ULONG + +GetIpNetTable2 = iphlpapi.GetIpNetTable2 +GetIpNetTable2.argtypes = [ULONG, ctypes.POINTER(PMIB_IPNET_TABLE2)] +GetIpNetTable2.restype = ULONG + +FreeMibTable = iphlpapi.FreeMibTable +FreeMibTable.argtypes = [LPVOID] +FreeMibTable.restype = None + +GetAdaptersAddresses = iphlpapi.GetAdaptersAddresses +GetAdaptersAddresses.argtypes = [ULONG, ULONG, LPVOID, LPVOID, PULONG] +GetAdaptersAddresses.restype = ULONG + +# net connection calls +GetExtendedTcpTable = iphlpapi.GetExtendedTcpTable +GetExtendedTcpTable.argtypes = [LPVOID, PDWORD, BOOL, ULONG, ULONG, ULONG] +GetExtendedTcpTable.restype = DWORD + +GetExtendedUdpTable = iphlpapi.GetExtendedUdpTable +GetExtendedUdpTable.argtypes = [LPVOID, PDWORD, BOOL, ULONG, ULONG, ULONG] +GetExtendedUdpTable.restype = DWORD diff --git a/acquire/dynamic/windows/netstat.py b/acquire/dynamic/windows/netstat.py new file mode 100644 index 00000000..8296f921 --- /dev/null +++ b/acquire/dynamic/windows/netstat.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import ctypes +from json import dumps +from socket import htons, inet_ntop +from typing import Callable, Union + +from acquire.dynamic.windows.iphlpapi import ( + ADDRESS_FAMILY, + BOOL, + CONNECTION_PROTOCOL, + ERROR_INSUFFICIENT_BUFFER, + LPVOID, + MIB_TCP6ROW_OWNER_PID, + MIB_TCP6TABLE_OWNER_PID, + MIB_TCP_STATE, + MIB_TCPROW_OWNER_PID, + MIB_TCPTABLE_OWNER_PID, + MIB_UDP6ROW_OWNER_PID, + MIB_UDP6TABLE_OWNER_PID, + MIB_UDPROW_OWNER_PID, + MIB_UDPTABLE_OWNER_PID, + NO_ERROR, + PDWORD, + TCP_CONNECTION_OFFLOAD_STATE, + TCP_TABLE_CLASS, + UDP_TABLE_CLASS, + ULONG, + GetExtendedTcpTable, + GetExtendedUdpTable, +) + +NetConnTableClass = Union[TCP_TABLE_CLASS, UDP_TABLE_CLASS] +NetConnTableType = Union[ + MIB_TCPTABLE_OWNER_PID, MIB_TCP6TABLE_OWNER_PID, MIB_UDPTABLE_OWNER_PID, MIB_UDP6TABLE_OWNER_PID +] +NetConnTableRowType = Union[MIB_TCPROW_OWNER_PID, MIB_TCP6ROW_OWNER_PID, MIB_UDPROW_OWNER_PID, MIB_UDP6ROW_OWNER_PID] +NetConnTableResult = 
Union[ + MIB_TCPTABLE_OWNER_PID, MIB_TCP6TABLE_OWNER_PID, MIB_UDPTABLE_OWNER_PID, MIB_UDP6TABLE_OWNER_PID +] +NetConnTableCallback = Callable[[LPVOID, PDWORD, BOOL, ULONG, ULONG, ULONG], NetConnTableResult] + +NetConnRowParserArgs = Union[MIB_TCPROW_OWNER_PID, MIB_TCP6ROW_OWNER_PID, MIB_UDPROW_OWNER_PID, MIB_UDP6ROW_OWNER_PID] +NetConnRowParser = Callable[[NetConnRowParserArgs], "NetConnection"] + + +class NetConnection: + def __init__( + self, + protocol: CONNECTION_PROTOCOL, + local_addr: str, + local_port: int, + remote_addr: str | None, + remote_port: int | None, + state: TCP_CONNECTION_OFFLOAD_STATE | None, + pid: int, + ) -> None: + self.protocol = protocol + self.local_address = local_addr + self.local_port = local_port + self.remote_address = remote_addr + self.remote_port = remote_port + self.state = state + self.pid = pid + + def as_dict(self) -> dict: + return { + 'protocol': self.protocol.name, + 'local_address': self.local_address, + 'local_port': self.local_port, + 'remote_address': self.remote_address, + 'remote_port': self.remote_port, + 'state': self.state.name if self.state else None, + 'pid': self.pid + } + + def __str__(self) -> str: + state = self.state.name if self.state else None + return ( + f"NetConnection(protocol={self.protocol.name}, lhost={self.local_address}, lport={self.local_port}, " + f"rhost={self.remote_address}, rport={self.remote_port}, state={state}, pid={self.pid})" + ) + + +def get_netconn_table( + get_netconn_table_proc: NetConnTableCallback, + family: ADDRESS_FAMILY, + cls: NetConnTableClass, + table_type: NetConnTableType, +) -> NetConnTableResult | None: + table_size = ULONG(0) + result = get_netconn_table_proc(LPVOID(0), ctypes.byref(table_size), True, family, cls, ULONG(0)) + + if result != ERROR_INSUFFICIENT_BUFFER: + return None + + buffer = ctypes.create_string_buffer(table_size.value) + result = get_netconn_table_proc(buffer, ctypes.byref(table_size), True, family, cls, ULONG(0)) + + if result != NO_ERROR: + return 
None + + return ctypes.cast(buffer, ctypes.POINTER(table_type)).contents + + +def parse_netconn_rows( + table: NetConnTableType, row_type: NetConnTableRowType, row_parse_callback: NetConnRowParser +) -> list[NetConnection]: + entries = table.dwNumEntries + rows = ctypes.cast(table.table, ctypes.POINTER(row_type * entries)).contents + + connections = [] + + for row in rows: + conn = row_parse_callback(row) + connections.append(conn) + + return connections + + +def tcp4_row_parser(row: MIB_TCPROW_OWNER_PID) -> NetConnection: + return NetConnection( + protocol=CONNECTION_PROTOCOL.TCP4, + local_addr=inet_ntop(ADDRESS_FAMILY.AF_INET, row.dwLocalAddr.to_bytes(4, byteorder="little")), + local_port=htons(row.dwLocalPort), + remote_addr=inet_ntop(ADDRESS_FAMILY.AF_INET, row.dwRemoteAddr.to_bytes(4, byteorder="little")), + remote_port=htons(row.dwRemotePort), + state=MIB_TCP_STATE(row.dwState), + pid=row.dwOwningPid, + ) + + +def udp4_row_parser(row: MIB_UDPROW_OWNER_PID) -> NetConnection: + return NetConnection( + protocol=CONNECTION_PROTOCOL.UDP4, + local_addr=inet_ntop(ADDRESS_FAMILY.AF_INET, row.dwLocalAddr.to_bytes(4, byteorder="little")), + local_port=htons(row.dwLocalPort), + remote_addr=None, + remote_port=None, + state=None, + pid=row.dwOwningPid, + ) + + +def tcp6_row_parser(row: MIB_TCP6ROW_OWNER_PID) -> NetConnection: + return NetConnection( + protocol=CONNECTION_PROTOCOL.TCP6, + local_addr=f"[{inet_ntop(ADDRESS_FAMILY.AF_INET6, row.ucLocalAddr)}]", + local_port=htons(row.dwLocalPort), + remote_addr=f"[{inet_ntop(ADDRESS_FAMILY.AF_INET6, row.ucRemoteAddr)}]", + remote_port=htons(row.dwRemotePort), + state=MIB_TCP_STATE(row.dwState), + pid=row.dwOwningPid, + ) + + +def udp6_row_parser(row: MIB_UDP6ROW_OWNER_PID) -> NetConnection: + return NetConnection( + protocol=CONNECTION_PROTOCOL.TCP6, + local_addr=f"[{inet_ntop(ADDRESS_FAMILY.AF_INET6, row.ucLocalAddr)}]", + local_port=htons(row.dwLocalPort), + remote_addr=None, + remote_port=None, + state=None, + 
pid=row.dwOwningPid, + ) + + +def get_active_connections() -> list[NetConnection]: + tcp4_table = get_netconn_table( + GetExtendedTcpTable, ADDRESS_FAMILY.AF_INET, TCP_TABLE_CLASS.OWNER_PID_ALL, MIB_TCPTABLE_OWNER_PID + ) + tcp4_conns = parse_netconn_rows(tcp4_table, MIB_TCPROW_OWNER_PID, tcp4_row_parser) + + tcp6_table = get_netconn_table( + GetExtendedTcpTable, ADDRESS_FAMILY.AF_INET6, TCP_TABLE_CLASS.OWNER_PID_ALL, MIB_TCP6TABLE_OWNER_PID + ) + tcp6_conns = parse_netconn_rows(tcp6_table, MIB_TCP6ROW_OWNER_PID, tcp6_row_parser) + + udp4_table = get_netconn_table( + GetExtendedUdpTable, ADDRESS_FAMILY.AF_INET, UDP_TABLE_CLASS.OWNER_PID, MIB_UDPTABLE_OWNER_PID + ) + udp4_conns = parse_netconn_rows(udp4_table, MIB_UDPROW_OWNER_PID, udp4_row_parser) + + udp6_table = get_netconn_table( + GetExtendedUdpTable, ADDRESS_FAMILY.AF_INET6, UDP_TABLE_CLASS.OWNER_PID, MIB_UDP6TABLE_OWNER_PID + ) + udp6_conns = parse_netconn_rows(udp6_table, MIB_UDP6ROW_OWNER_PID, udp6_row_parser) + + return tcp4_conns + tcp6_conns + udp4_conns + udp6_conns + + +def format_net_connections_csv(net_connections: list[NetConnection]) -> str: + def formatter(connection: NetConnection) -> str: + rhost = connection.remote_address if connection.remote_address else "" + rport = str(connection.remote_port) if connection.remote_port else "" + state = connection.state.name if connection.state else "" + return ",".join([connection.protocol.name, connection.local_address, + str(connection.local_port), rhost, rport, state]) + + header = ",".join(["protocol", "local address", "local port", "remote address", + "remote port", "state"]) + rows = "\n".join(formatter(connection) for connection in net_connections) + + return f"{header}\n{rows}" + + +def format_net_connections_json(net_connections: list[NetConnection], indent=0) -> str: + return dumps(net_connections, default=lambda connection: connection.as_dict(), + indent=indent if indent > 0 else None) + + +def format_net_connections_list(net_connections: 
list[NetConnection]) -> str: + def formatter(connection: NetConnection) -> str: + rhost = connection.remote_address if connection.remote_address else "" + rport = str(connection.remote_port) if connection.remote_port else "" + state = connection.state.name if connection.state else "" + + lconn = f"{connection.local_address}:{str(connection.local_port)}" + if connection.protocol in [CONNECTION_PROTOCOL.TCP4, CONNECTION_PROTOCOL.TCP6]: + rconn = f"{rhost}:{rport}" + else: + rconn = "*:*" + + return ( + f"{connection.protocol.name:<10}{lconn:<40}{rconn:<40}" + f"{state:<20}{str(connection.pid):<10}" + ) + + header = f"{'Proto':<10}{'Local Address':<40}{'Foreign Address':<40}{'State':<20}{'PID':<10}" + header += "\n" + ('=' * len(header)) + rows = "\n".join(formatter(connection) for connection in net_connections) + + return f"{header}\n{rows}" From b98465a65d312cb627198f9086d2f530b002b171 Mon Sep 17 00:00:00 2001 From: Bop <37331768+lhotlan64@users.noreply.github.com> Date: Tue, 22 Oct 2024 18:31:48 +0200 Subject: [PATCH 2/5] Correctly identify UDP6 connections --- acquire/dynamic/windows/netstat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/acquire/dynamic/windows/netstat.py b/acquire/dynamic/windows/netstat.py index 8296f921..7ba4208c 100644 --- a/acquire/dynamic/windows/netstat.py +++ b/acquire/dynamic/windows/netstat.py @@ -156,7 +156,7 @@ def tcp6_row_parser(row: MIB_TCP6ROW_OWNER_PID) -> NetConnection: def udp6_row_parser(row: MIB_UDP6ROW_OWNER_PID) -> NetConnection: return NetConnection( - protocol=CONNECTION_PROTOCOL.TCP6, + protocol=CONNECTION_PROTOCOL.UDP6, local_addr=f"[{inet_ntop(ADDRESS_FAMILY.AF_INET6, row.ucLocalAddr)}]", local_port=htons(row.dwLocalPort), remote_addr=None, From 6f9a93d003bfd6caf1a143d150d3153748e00b60 Mon Sep 17 00:00:00 2001 From: Bop <37331768+lhotlan64@users.noreply.github.com> Date: Tue, 22 Oct 2024 18:33:18 +0200 Subject: [PATCH 3/5] Do not support the legacy API as the minimum required Python version 
enforces a minimum Windows version --- acquire/acquire.py | 4636 +++++++++++++-------------- acquire/dynamic/windows/arp.py | 75 +- acquire/dynamic/windows/iphlpapi.py | 22 - 3 files changed, 2340 insertions(+), 2393 deletions(-) diff --git a/acquire/acquire.py b/acquire/acquire.py index 2ef79f82..cb664007 100644 --- a/acquire/acquire.py +++ b/acquire/acquire.py @@ -1,2322 +1,2314 @@ -from __future__ import annotations - -import argparse -import enum -import functools -import io -import itertools -import logging -import os -import platform -import shutil -import subprocess -import sys -import time -import urllib.parse -import urllib.request -from collections import defaultdict, namedtuple -from itertools import product -from pathlib import Path -from typing import BinaryIO, Callable, Iterator, Optional, Union - -from dissect.target import Target -from dissect.target.filesystem import Filesystem -from dissect.target.filesystems import ntfs -from dissect.target.helpers import fsutil -from dissect.target.plugins.apps.webserver import iis -from dissect.target.plugins.os.windows.log import evt, evtx -from dissect.target.tools.utils import args_to_uri -from dissect.util.stream import RunlistStream - -from acquire.collector import Collector, get_full_formatted_report, get_report_summary -from acquire.dynamic.windows.named_objects import NamedObjectType -from acquire.dynamic.windows.arp import ( - NetAdapter, - NetNeighbor, - get_windows_network_adapters, - get_windows_arp_cache, - get_windows_net_neighbors, - format_net_neighbors_list -) -from acquire.dynamic.windows.netstat import ( - NetConnection, - get_active_connections, - format_net_connections_list -) -from acquire.esxi import esxi_memory_context_manager -from acquire.gui import GUI -from acquire.hashes import ( - HashFunc, - collect_hashes, - filter_out_by_path_match, - filter_out_by_value_match, - filter_out_huge_files, - serialize_into_csv, -) -from acquire.log import get_file_handler, reconfigure_log_file, 
setup_logging -from acquire.outputs import OUTPUTS -from acquire.uploaders.minio import MinIO -from acquire.uploaders.plugin import UploaderPlugin, upload_files_using_uploader -from acquire.uploaders.plugin_registry import UploaderRegistry -from acquire.utils import ( - check_and_set_acquire_args, - check_and_set_log_args, - create_argument_parser, - format_output_name, - get_formatted_exception, - get_user_name, - get_utc_now, - get_utc_now_str, - is_user_admin, - normalize_path, - parse_acquire_args, - persist_execution_report, -) - -try: - from acquire.version import version -except ImportError: - version = "0.0.dev" - -try: - # Injected by pystandalone builder - from acquire.config import CONFIG -except ImportError: - CONFIG = defaultdict(lambda: None) - - -VERSION = version -ACQUIRE_BANNER = r""" - _ - __ _ ___ __ _ _ _(_)_ __ ___ - / _` |/ __/ _` | | | | | '__/ _ \ -| (_| | (_| (_| | |_| | | | | __/ - \__,_|\___\__, |\__,_|_|_| \___| - by Fox-IT |_| v{} - part of NCC Group -""".format( - VERSION -)[ - 1: -] - -MODULES = {} -MODULE_LOOKUP = {} - -CLI_ARGS_MODULE = "cli-args" - -log = logging.getLogger("acquire") -log.propagate = 0 -log_file_handler = None -logging.lastResort = None -logging.raiseExceptions = False - - -def misc_windows_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - misc_dirs = { - ("Windows/ServiceProfiles/LocalService", False), - ("Windows/ServiceProfiles/NetworkService", False), - ("Windows/System32/config/systemprofile", False), - ("Users", True), - ("Documents and Settings", True), - } - - for fs in target.fs.path().iterdir(): - if fs.name.lower() == "c:": - continue - - for misc_dir, get_subdirs in misc_dirs: - misc_path = fs.joinpath(misc_dir) - - if not misc_path.exists(): - continue - - if get_subdirs: - for entry in misc_path.iterdir(): - if entry.is_dir(): - yield entry - else: - yield misc_path - - -def misc_unix_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - user_dirs = ["root", "home/*"] - - home_dirs = 
(target.fs.path("/").glob(path) for path in user_dirs) - for home_dir in itertools.chain.from_iterable(home_dirs): - yield home_dir - - -def misc_osx_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - for homedir in itertools.chain(target.fs.path("/Users/").glob("*"), misc_unix_user_homes(target)): - yield homedir - - -MISC_MAPPING = { - "osx": misc_osx_user_homes, - "windows": misc_windows_user_homes, -} - - -def from_user_home(target: Target, path: str) -> Iterator[str]: - try: - for user_details in target.user_details.all_with_home(): - yield user_details.home_path.joinpath(path).as_posix() - except Exception as e: - log.warning("Error occurred when requesting all user homes") - log.debug("", exc_info=e) - - misc_user_homes = MISC_MAPPING.get(target.os, misc_unix_user_homes) - for user_dir in misc_user_homes(target): - yield user_dir.joinpath(path).as_posix() - - -def iter_ntfs_filesystems(target: Target) -> Iterator[tuple[ntfs.NtfsFilesystem, Optional[str], str, str]]: - mount_lookup = defaultdict(list) - for mount, fs in target.fs.mounts.items(): - mount_lookup[fs].append(mount) - - for fs in target.filesystems: - # The attr check is needed to correctly collect fake NTFS filesystems - # where the MFT etc. are added to a VirtualFilesystem. This happens for - # instance when the target is an acquired tar target. 
- if not isinstance(fs, ntfs.NtfsFilesystem) and not hasattr(fs, "ntfs"): - log.warning("Skipping %s - not an NTFS filesystem", fs) - continue - - if fs in mount_lookup: - mountpoints = mount_lookup[fs] - - for main_mountpoint in mountpoints: - if main_mountpoint != "sysvol": - break - - name = main_mountpoint - mountpoints = ", ".join(mountpoints) - else: - main_mountpoint = None - name = f"vol-{fs.ntfs.serial:x}" - mountpoints = "No mounts" - log.warning("Unmounted NTFS filesystem found %s (%s)", fs, name) - - yield fs, main_mountpoint, name, mountpoints - - -def iter_esxi_filesystems(target: Target) -> Iterator[tuple[Filesystem, str, str, Optional[str]]]: - for mount, fs in target.fs.mounts.items(): - if not mount.startswith("/vmfs/volumes/"): - continue - - uuid = mount[len("/vmfs/volumes/") :] # strip /vmfs/volumes/ - name = None - if fs.__type__ == "fat": - name = fs.volume.name - elif fs.__type__ == "vmfs": - name = fs.vmfs.label - - yield fs, mount, uuid, name - - -def register_module(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: - def wrapper(module_cls: type[Module]) -> type[Module]: - name = module_cls.__name__ - - if name in MODULES: - raise ValueError( - f"Module name is already registered: registration for {module_cls} conflicts with {MODULES[name]}" - ) - - desc = module_cls.DESC or name - kwargs["help"] = f"acquire {desc}" - kwargs["action"] = argparse.BooleanOptionalAction - kwargs["dest"] = name.lower() - module_cls.__modname__ = name - - if not hasattr(module_cls, "__cli_args__"): - module_cls.__cli_args__ = [] - module_cls.__cli_args__.append((args, kwargs)) - - MODULES[name] = module_cls - return module_cls - - return wrapper - - -def module_arg(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: - def wrapper(module_cls: type[Module]) -> type[Module]: - if not hasattr(module_cls, "__cli_args__"): - module_cls.__cli_args__ = [] - module_cls.__cli_args__.append((args, kwargs)) - return module_cls - - return wrapper - - -def 
local_module(cls: type[object]) -> object: - """A decorator that sets property `__local__` on a module class to mark it for local target only""" - cls.__local__ = True - return cls - - -class ExecutionOrder(enum.IntEnum): - TOP = 0 - DEFAULT = 1 - BOTTOM = 2 - - -class Module: - DESC = None - SPEC = [] - EXEC_ORDER = ExecutionOrder.DEFAULT - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - desc = cls.DESC or cls.__name__.lower() - log.info("*** Acquiring %s", desc) - - with collector.bind_module(cls): - collector.collect(cls.SPEC) - - spec_ext = cls.get_spec_additions(target, cli_args) - if spec_ext: - collector.collect(list(spec_ext)) - - cls._run(target, cli_args, collector) - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - pass - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - pass - - -@register_module("--sys") -@local_module -class Sys(Module): - DESC = "Sysfs files (live systems only)" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - spec = [("dir", "/sys")] - collector.collect(spec, follow=False, volatile=True) - - -@register_module("--proc") -@local_module -class Proc(Module): - DESC = "Procfs files (live systems only)" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - spec = [("dir", "/proc")] - collector.collect(spec, follow=False, volatile=True) - - -@register_module("-n", "--ntfs") -class NTFS(Module): - DESC = "NTFS filesystem metadata" - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): - log.info("Acquiring from %s as %s (%s)", fs, name, 
mountpoints) - - for filename in ("$MFT", "$Boot", "$Secure:$SDS"): - if main_mountpoint is not None: - path = fsutil.join(main_mountpoint, filename) - collector.collect_path(path) - - else: - # In case the NTFS filesystem is not mounted, which should not occur but - # iter_ntfs_filesystems allows for the possibility, we fall back to raw file - # collection. - collector.collect_file_raw(filename, fs, name) - - cls.collect_usnjrnl(collector, fs, name) - - @classmethod - def collect_usnjrnl(cls, collector: Collector, fs: Filesystem, name: str) -> None: - def usnjrnl_accessor(journal: BinaryIO) -> tuple[BinaryIO, int]: - # If the filesystem is a virtual NTFS filesystem, journal will be - # plain BinaryIO, not a RunlistStream. - if isinstance(journal, RunlistStream): - i = 0 - while journal.runlist[i][0] is None: - journal.seek(journal.runlist[i][1] * journal.block_size, io.SEEK_CUR) - i += 1 - size = journal.size - journal.tell() - else: - size = journal.size - - return (journal, size) - - collector.collect_file_raw( - "$Extend/$Usnjrnl:$J", - fs, - name, - file_accessor=usnjrnl_accessor, - ) - - -@register_module("-r", "--registry") -class Registry(Module): - DESC = "registry hives" - HIVES = ["drivers", "sam", "security", "software", "system", "default"] - SPEC = [ - ("dir", "sysvol/windows/system32/config/txr"), - ("dir", "sysvol/windows/system32/config/regback"), - ("glob", "sysvol/System Volume Information/_restore*/RP*/snapshot/_REGISTRY_*"), - ("glob", "ntuser.dat*", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/UsrClass.dat*", from_user_home), - ("glob", "Local Settings/Application Data/Microsoft/Windows/UsrClass.dat*", from_user_home), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - # Glob all hives to include e.g. .LOG files and .regtrans-ms files. 
- files = [] - for hive in cls.HIVES: - pattern = "sysvol/windows/system32/config/{}*".format(hive) - for entry in target.fs.path().glob(pattern): - if entry.is_file(): - files.append(("file", entry)) - return files - - -@register_module("--netstat") -@local_module -class Netstat(Module): - DESC = "Windows network connections" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - net_connections: list[NetConnection] = get_active_connections() - output = format_net_connections_list(net_connections) - - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE - full_output_path = fsutil.join(output_base, "netstat") - - collector.output.write_bytes(full_output_path, output.encode()) - collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"]) - - -@register_module("--win-processes") -@local_module -class WinProcesses(Module): - DESC = "Windows process list" - SPEC = [ - ("command", (["tasklist", "/V", "/fo", "csv"], "win-processes")), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--win-proc-env") -@local_module -class WinProcEnv(Module): - DESC = "Process environment variables" - SPEC = [ - ( - "command", - ( - ["PowerShell", "-command", "Get-Process | ForEach-Object {$_.StartInfo.EnvironmentVariables}"], - "win-process-env-vars", - ), - ), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--win-arp-cache") -@local_module -class WinArpCache(Module): - DESC = "ARP Cache" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - network_adapters: list[NetAdapter] = get_windows_network_adapters() - - neighbors: list[NetNeighbor] = [] - - if float(target.ntversion) < 6.2: - neighbors = get_windows_arp_cache(network_adapters) - else: - neighbors = 
get_windows_net_neighbors(network_adapters) - - output = format_net_neighbors_list(neighbors) - - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE - full_output_path = fsutil.join(output_base, "arp-cache") - - collector.output.write_bytes(full_output_path, output.encode()) - collector.report.add_command_collected(cls.__name__, ["arp-cache"]) - - -@register_module("--win-rdp-sessions") -@local_module -class WinRDPSessions(Module): - DESC = "Windows Remote Desktop session information" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - # where.exe instead of where, just in case the client runs in PS instead of CMD - # by default where hides qwinsta on 32-bit systems because qwinsta is only 64-bit, but with recursive /R search - # we can still manage to find it and by passing the exact path Windows will launch a 64-bit process - # on systems capable of doing that. 
- qwinsta = subprocess.run( - ["where.exe", "/R", os.environ["WINDIR"], "qwinsta.exe"], capture_output=True, text=True - ).stdout.split("\n")[0] - return [ - ("command", ([qwinsta, "/VM"], "win-rdp-sessions")), - ] - - -@register_module("--winpmem") -@local_module -class WinMemDump(Module): - DESC = "Windows full memory dump" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - winpmem_file_name = "winpmem.exe" - winpmem_exec = shutil.which(winpmem_file_name) - - command_parts = [winpmem_exec, "-"] - - if winpmem_exec is None: - command_parts.pop(0) - command_parts.insert(0, winpmem_file_name) - collector.report.add_command_failed(cls.__name__, command_parts) - log.error( - "- Failed to collect output from command `%s`, program %s not found", - " ".join(command_parts), - winpmem_file_name, - ) - return - - else: - log.info("- Collecting output from command `%s`", " ".join(command_parts)) - - mem_dump_path = collector.output.path.with_name("winpmem") - mem_dump_errors_path = mem_dump_path.with_name("winpmem.errors") - - output_base = collector.COMMAND_OUTPUT_BASE - if collector.base: - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) - - mem_dump_output_path = fsutil.join(output_base, mem_dump_path.name) - mem_dump_errors_output_path = fsutil.join(output_base, mem_dump_errors_path.name) - - with mem_dump_path.open(mode="wb") as mem_dump_fh: - with mem_dump_errors_path.open(mode="wb") as mem_dump_errors_fh: - try: - # The shell parameter must be set to False, as otherwise the - # output from stdout is not piped into the filehandle. - # The check parameter must be set to False, as winpmem.exe - # always seems to exit with an error code, even on success. 
- subprocess.run( - bufsize=0, - args=command_parts, - stdout=mem_dump_fh, - stderr=mem_dump_errors_fh, - shell=False, - check=False, - ) - - except Exception: - collector.report.add_command_failed(cls.__name__, command_parts) - log.error( - "- Failed to collect output from command `%s`", - " ".join(command_parts), - exc_info=True, - ) - return - - collector.output.write_entry(mem_dump_output_path, mem_dump_path) - collector.output.write_entry(mem_dump_errors_output_path, mem_dump_errors_path) - collector.report.add_command_collected(cls.__name__, command_parts) - mem_dump_path.unlink() - mem_dump_errors_path.unlink() - - -@register_module("--winmem-files") -class WinMemFiles(Module): - DESC = "Windows memory files" - SPEC = [ - ("file", "sysvol/pagefile.sys"), - ("file", "sysvol/hiberfil.sys"), - ("file", "sysvol/swapfile.sys"), - ("file", "sysvol/windows/memory.dmp"), - ("dir", "sysvol/windows/minidump"), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - page_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Memory Management" - for reg_key in target.registry.iterkeys(page_key): - for page_path in reg_key.value("ExistingPageFiles").value: - spec.add(("file", target.resolve(page_path))) - - crash_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\CrashControl" - for reg_key in target.registry.iterkeys(crash_key): - spec.add(("file", target.resolve(reg_key.value("DumpFile").value))) - spec.add(("dir", target.resolve(reg_key.value("MinidumpDir").value))) - - return spec - - -@register_module("-e", "--eventlogs") -class EventLogs(Module): - DESC = "event logs" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - evt_log_paths = evt.EvtPlugin(target).get_logs(filename_glob="*.evt") - for path in evt_log_paths: - spec.add(("file", path)) - evtx_log_paths = 
evtx.EvtxPlugin(target).get_logs(filename_glob="*.evtx") - for path in evtx_log_paths: - spec.add(("file", path)) - return spec - - -@register_module("-t", "--tasks") -class Tasks(Module): - SPEC = [ - ("dir", "sysvol/windows/tasks"), - ("dir", "sysvol/windows/system32/tasks"), - ("dir", "sysvol/windows/syswow64/tasks"), - ("dir", "sysvol/windows/sysvol/domain/policies"), - ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), - # Task Scheduler Service transaction log - ("file", "sysvol/SchedLgU.txt"), - ("file", "sysvol/windows/SchedLgU.txt"), - ("file", "sysvol/windows/tasks/SchedLgU.txt"), - ("file", "sysvol/winnt/tasks/SchedLgU.txt"), - ] - - -@register_module("-ad", "--active-directory") -class ActiveDirectory(Module): - DESC = "Active Directory data (policies, scripts, etc.)" - SPEC = [ - ("dir", "sysvol/windows/sysvol/domain"), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters" - for reg_key in target.registry.iterkeys(key): - try: - spec.add(("dir", reg_key.value("SysVol").value)) - except Exception: - pass - return spec - - -@register_module("-nt", "--ntds") -class NTDS(Module): - SPEC = [ - ("dir", "sysvol/windows/NTDS"), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - key = "HKLM\\SYSTEM\\CurrentControlSet\\services\\NTDS\\Parameters" - values = [ - ("dir", "DSA Working Directory"), - ("file", "DSA Database File"), - ("file", "Database backup path"), - ("dir", "Database log files path"), - ] - for reg_key in target.registry.iterkeys(key): - for collect_type, value in values: - path = reg_key.value(value).value - spec.add((collect_type, path)) - - return spec - - -@register_module("--etl") -class ETL(Module): - DESC = "interesting ETL files" - SPEC = [ - ("glob", "sysvol/Windows/System32/WDI/LogFiles/*.etl"), - 
] - - -@register_module("--recents") -class Recents(Module): - DESC = "Windows recently used files artifacts" - SPEC = [ - ("dir", "AppData/Roaming/Microsoft/Windows/Recent", from_user_home), - ("dir", "AppData/Roaming/Microsoft/Office/Recent", from_user_home), - ("glob", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/*.lnk", from_user_home), - ("glob", "Desktop/*.lnk", from_user_home), - ("glob", "Recent/*.lnk", from_user_home), - ("glob", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/*.lnk"), - ] - - -@register_module("--startup") -class Startup(Module): - DESC = "Windows Startup folder" - SPEC = [ - ("dir", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/Startup"), - ("dir", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup", from_user_home), - ] - - -def recyclebin_filter(path: fsutil.TargetPath) -> bool: - return bool(path.stat().st_size >= (10 * 1024 * 1024)) # 10MB - - -@register_module("--recyclebin") -@module_arg( - "--large-files", - action=argparse.BooleanOptionalAction, - help="Collect files larger than 10MB in the Recycle Bin", -) -@module_arg( - "--data-files", - action=argparse.BooleanOptionalAction, - help="Collect the data files in the Recycle Bin", -) -class RecycleBin(Module): - DESC = "recycle bin metadata and data files" - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - large_files_filter = None if cli_args.large_files else recyclebin_filter - - if large_files_filter: - log.info("Skipping files in Recycle Bin that are larger than 10MB.") - - patterns = ["$Recycle.bin/*/$I*", "Recycler/*/INFO2", "Recycled/INFO2"] - - if cli_args.data_files is None or cli_args.data_files: - patterns.extend(["$Recycle.Bin/$R*", "$Recycle.Bin/*/$R*", "RECYCLE*/D*"]) - - with collector.file_filter(large_files_filter): - for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): - log.info("Acquiring recycle bin from %s as %s (%s)", fs, name, 
mountpoints) - - for pattern in patterns: - if main_mountpoint is not None: - pattern = fsutil.join(main_mountpoint, pattern) - collector.collect_glob(pattern) - else: - # In case the NTFS filesystem is not mounted, which should not occur but - # iter_ntfs_filesystems allows for the possibility, we fall back to raw file - # collection. - for entry in fs.path().glob(pattern): - if entry.is_file(): - collector.collect_file_raw(fs, entry, name) - - -@register_module("--drivers") -class Drivers(Module): - DESC = "installed drivers" - SPEC = [ - ("glob", "sysvol/windows/system32/drivers/*.sys"), - ] - - -@register_module("--exchange") -class Exchange(Module): - DESC = "interesting Exchange configuration files" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - key = "HKLM\\SOFTWARE\\Microsoft\\ExchangeServer" - for reg_key in target.registry.iterkeys(key): - for subkey in reg_key.subkeys(): - try: - setup_key = subkey.subkey("Setup") - install_path = setup_key.value("MsiInstallPath").value - spec.update( - [ - ( - "file", - f"{install_path}\\TransportRoles\\Agents\\agents.config", - ), - ( - "dir", - f"{install_path}\\Logging\\Ews", - ), - ( - "dir", - f"{install_path}\\Logging\\CmdletInfra\\Powershell-Proxy\\Cmdlet", - ), - ( - "dir", - f"{install_path}\\TransportRoles\\Logs", - ), - ] - ) - except Exception: - pass - return spec - - -@register_module("--iis") -class IIS(Module): - DESC = "IIS logs" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set( - [ - ("glob", "sysvol\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\Windows.old\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\inetpub\\logs\\LogFiles\\*.log"), - ("glob", "sysvol\\inetpub\\logs\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\Resources\\Directory\\*\\LogFiles\\Web\\W3SVC*\\*.log"), - ] - ) - iis_plugin = 
iis.IISLogsPlugin(target) - spec.update([("file", log_path) for _, log_path in iis_plugin.iter_log_format_path_pairs()]) - return spec - - -@register_module("--prefetch") -class Prefetch(Module): - DESC = "Windows Prefetch files" - SPEC = [ - ("dir", "sysvol/windows/prefetch"), - ] - - -@register_module("--appcompat") -class Appcompat(Module): - DESC = "Windows Amcache and RecentFileCache" - SPEC = [ - ("dir", "sysvol/windows/appcompat"), - ] - - -@register_module("--pca") -class PCA(Module): - DESC = "Windows Program Compatibility Assistant" - SPEC = [ - ("dir", "sysvol/windows/pca"), - ] - - -@register_module("--syscache") -class Syscache(Module): - DESC = "Windows Syscache hive and log files" - SPEC = [ - ("file", "sysvol/System Volume Information/Syscache.hve"), - ("glob", "sysvol/System Volume Information/Syscache.hve.LOG*"), - ] - - -@register_module("--win-notifications") -class WindowsNotifications(Module): - DESC = "Windows Push Notifications Database files." - SPEC = [ - # Old Win7/Win10 version of the file - ("file", "AppData/Local/Microsoft/Windows/Notifications/appdb.dat", from_user_home), - # New version of the file - ("file", "AppData/Local/Microsoft/Windows/Notifications/wpndatabase.db", from_user_home), - ] - - -@register_module("--bits") -class BITS(Module): - DESC = "Background Intelligent Transfer Service (BITS) queue/log DB" - SPEC = [ - # Pre-Win10 the BITS DB files are called qmgr[01].dat, in Win10 it is - # called qmgr.db and its transaction logs edb.log and edb.log[0-2] - # Win 2000/XP/2003 path - # (basically: \%ALLUSERSPROFILE%\Application Data\Microsoft\...) 
- ("glob", "sysvol/Documents and Settings/All Users/Application Data/Microsoft/Network/Downloader/qmgr*.dat"), - # Win Vista and higher path - # (basically: \%ALLUSERSPROFILE%\Microsoft\...; %ALLUSERSPROFILE% == %PROGRAMDATA%) - ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr*.dat"), - # Win 10 files - ("file", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr.db"), - ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/edb.log*"), - ] - - -@register_module("--wbem") -class WBEM(Module): - DESC = "Windows WBEM (WMI) database files" - SPEC = [ - ("dir", "sysvol/windows/system32/wbem/Repository"), - ] - - -@register_module("--dhcp") -class DHCP(Module): - DESC = "Windows Server DHCP files" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\DhcpServer\\Parameters" - for reg_key in target.registry.iterkeys(key): - spec.add(("dir", reg_key.value("DatabasePath").value)) - return spec - - -@register_module("--dns") -class DNS(Module): - DESC = "Windows Server DNS files" - SPEC = [ - ("glob", "sysvol/windows/system32/config/netlogon.*"), - ("dir", "sysvol/windows/system32/dns"), - ] - - -@register_module("--win-dns-cache") -@local_module -class WinDnsClientCache(Module): - DESC = "The contents of Windows DNS client cache" - SPEC = [ - ( - "command", - # Powershell.exe understands a subcommand passed as single string parameter, - # no need to split the subcommand in parts. 
- ( - ["powershell.exe", "-Command", "Get-DnsClientCache | ConvertTo-Csv -NoTypeInformation"], - "get-dnsclientcache", - ), - ), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--powershell") -class PowerShell(Module): - DESC = "Windows PowerShell Artefacts" - SPEC = [ - ("dir", "AppData/Roaming/Microsoft/Windows/PowerShell", from_user_home), - ] - - -@register_module("--thumbnail-cache") -class ThumbnailCache(Module): - DESC = "Windows thumbnail db artifacts" - SPEC = [ - ("glob", "AppData/Local/Microsoft/Windows/Explorer/thumbcache_*", from_user_home), - ] - - -@register_module("--text-editor") -class TextEditor(Module): - DESC = "text editor (un)saved tab contents" - # Only Windows 11 notepad & Notepad++ tabs for now, but locations for other text editors may be added later. - SPEC = [ - ("dir", "AppData/Local/Packages/Microsoft.WindowsNotepad_8wekyb3d8bbwe/LocalState/TabState/", from_user_home), - ("dir", "AppData/Roaming/Notepad++/backup/", from_user_home), - ] - - -@register_module("--misc") -class Misc(Module): - DESC = "miscellaneous Windows artefacts" - SPEC = [ - ("file", "sysvol/windows/PFRO.log"), - ("file", "sysvol/windows/setupapi.log"), - ("file", "sysvol/windows/setupapidev.log"), - ("glob", "sysvol/windows/inf/setupapi*.log"), - ("glob", "sysvol/system32/logfiles/*/*.txt"), - ("dir", "sysvol/windows/system32/sru"), - ("dir", "sysvol/windows/system32/drivers/etc"), - ("dir", "sysvol/Windows/System32/WDI/LogFiles/StartupInfo"), - ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), - ("dir", "sysvol/ProgramData/Microsoft/Group Policy/History/"), - ("dir", "AppData/Local/Microsoft/Group Policy/History/", from_user_home), - ("glob", "sysvol/Windows/System32/LogFiles/SUM/*.mdb"), - ("glob", "sysvol/ProgramData/USOShared/Logs/System/*.etl"), - ("glob", "sysvol/Windows/Logs/WindowsUpdate/WindowsUpdate*.etl"), - ("glob", "sysvol/Windows/Logs/CBS/CBS*.log"), - ("dir", "sysvol/ProgramData/Microsoft/Search/Data/Applications/Windows"), - 
("dir", "sysvol/Windows/SoftwareDistribution/DataStore"), - ] - - -@register_module("--av") -class AV(Module): - DESC = "various antivirus logs" - SPEC = [ - # AVG - ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/log"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/report"), - ("dir", "sysvol/ProgramData/AVG/Antivirus/log"), - ("dir", "sysvol/ProgramData/AVG/Antivirus/report"), - # Avast - ("dir", "sysvol/Documents And Settings/All Users/Application Data/Avast Software/Avast/Log"), - ("dir", "sysvol/ProgramData/Avast Software/Avast/Log"), - ("dir", "Avast Software/Avast/Log", from_user_home), - ("file", "sysvol/ProgramData/Avast Software/Avast/Chest/index.xml"), - # Avira - ("dir", "sysvol/ProgramData/Avira/Antivirus/LOGFILES"), - ("dir", "sysvol/ProgramData/Avira/Security/Logs"), - ("dir", "sysvol/ProgramData/Avira/VPN"), - # Bitdefender - ("dir", "sysvol/ProgramData/Bitdefender/Endpoint Security/Logs"), - ("dir", "sysvol/ProgramData/Bitdefender/Desktop/Profiles/Logs"), - ("glob", "sysvol/Program Files*/Bitdefender*/*"), - # ComboFix - ("file", "sysvol/ComboFix.txt"), - # Cybereason - ("dir", "sysvol/ProgramData/crs1/Logs"), - ("dir", "sysvol/ProgramData/apv2/Logs"), - ("dir", "sysvol/ProgramData/crb1/Logs"), - # Cylance - ("dir", "sysvol/ProgramData/Cylance/Desktop"), - ("dir", "sysvol/ProgramData/Cylance/Optics/Log"), - ("dir", "sysvol/Program Files/Cylance/Desktop/log"), - # ESET - ("dir", "sysvol/Documents and Settings/All Users/Application Data/ESET/ESET NOD32 Antivirus/Logs"), - ("dir", "sysvol/ProgramData/ESET/ESET NOD32 Antivirus/Logs"), - ("dir", "sysvol/ProgramData/ESET/ESET Security/Logs"), - ("dir", "sysvol/ProgramData/ESET/RemoteAdministrator/Agent/EraAgentApplicationData/Logs"), - ("dir", "sysvol/Windows/System32/config/systemprofile/AppData/Local/ESET/ESET Security/Quarantine"), - # Emsisoft - ("glob", "sysvol/ProgramData/Emsisoft/Reports/scan*.txt"), - # F-Secure - ("dir", 
"sysvol/ProgramData/F-Secure/Log"), - ("dir", "AppData/Local/F-Secure/Log", from_user_home), - ("dir", "sysvol/ProgramData/F-Secure/Antivirus/ScheduledScanReports"), - # HitmanPro - ("dir", "sysvol/ProgramData/HitmanPro/Logs"), - ("dir", "sysvol/ProgramData/HitmanPro.Alert/Logs"), - ("file", "sysvol/ProgramData/HitmanPro.Alert/excalibur.db"), - ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), - # Malwarebytes - ("glob", "sysvol/ProgramData/Malwarebytes/Malwarebytes Anti-Malware/Logs/mbam-log-*.xml"), - ("glob", "sysvol/ProgramData/Malwarebytes/MBAMService/logs/mbamservice.log*"), - ("dir", "AppData/Roaming/Malwarebytes/Malwarebytes Anti-Malware/Logs", from_user_home), - ("dir", "sysvol/ProgramData/Malwarebytes/MBAMService/ScanResults"), - # McAfee - ("dir", "Application Data/McAfee/DesktopProtection", from_user_home), - ("dir", "sysvol/ProgramData/McAfee/DesktopProtection"), - ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs"), - ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs_Old"), - ("dir", "sysvol/ProgramData/Mcafee/VirusScan"), - ("dir", "sysvol/ProgramData/McAfee/MSC/Logs"), - ("dir", "sysvol/ProgramData/McAfee/Agent/AgentEvents"), - ("dir", "sysvol/ProgramData/McAfee/Agent/logs"), - ("dir", "sysvol/ProgramData/McAfee/datreputation/Logs"), - ("dir", "sysvol/ProgramData/Mcafee/Managed/VirusScan/Logs"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Common Framework/AgentEvents"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/MCLOGS/SAE"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/datreputation/Logs"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Managed/VirusScan/Logs"), - ("dir", "sysvol/Program Files (x86)/McAfee/DLP/WCF Service/Log"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Apache2/Logs"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/DB/Events"), - ("dir", "sysvol/Program 
Files (x86)/McAfee/ePolicy Orchestrator/DB/Events/Debug"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Server/Logs"), - # RogueKiller - ("glob", "sysvol/ProgramData/RogueKiller/logs/AdliceReport_*.json"), - # SUPERAntiSpyware - ("dir", "AppData/Roaming/SUPERAntiSpyware/Logs", from_user_home), - # SecureAge - ("dir", "sysvol/ProgramData/SecureAge Technology/SecureAge/log"), - # SentinelOne - ("dir", "sysvol/programdata/sentinel/logs"), - # Sophos - ("glob", "sysvol/Documents and Settings/All Users/Application Data/Sophos/Sophos */Logs"), - ("glob", "sysvol/ProgramData/Sophos/Sophos */Logs"), - # Symantec - ( - "dir", - "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Logs/AV", - ), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Logs"), - ("dir", "AppData/Local/Symantec/Symantec Endpoint Protection/Logs", from_user_home), - ("dir", "sysvol/Windows/System32/winevt/logs/Symantec Endpoint Protection Client.evtx"), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/CmnClnt/ccSubSDK"), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/registrationInfo.xml"), - # TotalAV - ("glob", "sysvol/Program Files*/TotalAV/logs"), - ("dir", "sysvol/ProgramData/TotalAV/logs"), - # Trendmicro - ("glob", "sysvol/Program Files*/Trend Micro"), - # VIPRE - ("dir", "sysvol/ProgramData/VIPRE Business Agent/Logs"), - ("dir", "AppData/Roaming/VIPRE Business", from_user_home), - ("dir", "AppData/Roaming/GFI Software/AntiMalware/Logs", from_user_home), - ("dir", "AppData/Roaming/Sunbelt Software/AntiMalware/Logs", from_user_home), - # Webroot - ("file", "sysvol/ProgramData/WRData/WRLog.log"), - # Microsoft Windows Defender - ("dir", "sysvol/ProgramData/Microsoft/Microsoft AntiMalware/Support"), - ("glob", "sysvol/Windows/System32/winevt/Logs/Microsoft-Windows-Windows Defender*.evtx"), - ("dir", "sysvol/ProgramData/Microsoft/Windows 
Defender/Support"), - ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Scans/History/Service/DetectionHistory"), - ("file", "sysvol/Windows/Temp/MpCmdRun.log"), - ("file", "sysvol/Windows.old/Windows/Temp/MpCmdRun.log"), - ] - - -@register_module("--quarantined") -class QuarantinedFiles(Module): - DESC = "files quarantined by various antivirus products" - SPEC = [ - # Microsoft Defender - # https://knez.github.io/posts/how-to-extract-quarantine-files-from-windows-defender/ - ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Quarantine"), - # Symantec Endpoint Protection - ( - "dir", - "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Quarantine", - ), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Quarantine"), - # Trend Micro - # https://secret.inf.ufpr.br/papers/marcus_av_handson.pdf - ("dir", "sysvol/ProgramData/Trend Micro/AMSP/quarantine"), - # McAfee - ("dir", "sysvol/Quarantine"), - ("dir", "sysvol/ProgramData/McAfee/VirusScan/Quarantine"), - # Sophos - ("glob", "sysvol/ProgramData/Sophos/Sophos/*/Quarantine"), - ("glob", "sysvol/ProgramData/Sophos/Sophos */INFECTED"), - ("dir", "sysvol/ProgramData/Sophos/Safestore"), - # HitmanPRO - ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), - ] - - -@register_module("--edr") -class EDR(Module): - DESC = "various Endpoint Detection and Response (EDR) logs" - SPEC = [ - # Carbon Black - ("dir", "sysvol/ProgramData/CarbonBlack/Logs"), - ] - - -@register_module("--history") -class History(Module): - DESC = "browser history from IE, Edge, Firefox, and Chrome" - DIR_COMBINATIONS = namedtuple("DirCombinations", ["root_dirs", "dir_extensions", "history_files"]) - COMMON_DIR_COMBINATIONS = [ - DIR_COMBINATIONS( - [ - # Chromium - RHEL/Ubuntu - DNF/apt - ".config/chromium", - # Chrome - RHEL/Ubuntu - DNF - ".config/google-chrome", - # Edge - RHEL/Ubuntu - DNF/apt - ".config/microsoft-edge", - # Chrome - RHEL/Ubuntu - Flatpak - 
".var/app/com.google.Chrome/config/google-chrome", - # Edge - RHEL/Ubuntu - Flatpak - ".var/app/com.microsoft.Edge/config/microsoft-edge", - # Chromium - RHEL/Ubuntu - Flatpak - ".var/app/org.chromium.Chromium/config/chromium", - # Chrome - "AppData/Local/Google/Chrom*/User Data", - # Edge - "AppData/Local/Microsoft/Edge/User Data", - "Library/Application Support/Microsoft Edge", - "Local Settings/Application Data/Microsoft/Edge/User Data", - # Chrome - Legacy - "Library/Application Support/Chromium", - "Library/Application Support/Google/Chrome", - "Local Settings/Application Data/Google/Chrom*/User Data", - # Chromium - RHEL/Ubuntu - snap - "snap/chromium/common/chromium", - # Brave - Windows - "AppData/Local/BraveSoftware/Brave-Browser/User Data", - "AppData/Roaming/BraveSoftware/Brave-Browser/User Data", - # Brave - Linux - ".config/BraveSoftware", - # Brave - MacOS - "Library/Application Support/BraveSoftware", - ], - ["*", "Snapshots/*/*"], - [ - "Archived History", - "Bookmarks", - "Cookies*", - "Network", - "Current Session", - "Current Tabs", - "Extension Cookies", - "Favicons", - "History", - "Last Session", - "Last Tabs", - "Login Data", - "Login Data For Account", - "Media History", - "Shortcuts", - "Snapshots", - "Top Sites", - "Web Data", - ], - ), - ] - - SPEC = [ - # IE - ("dir", "AppData/Local/Microsoft/Internet Explorer/Recovery", from_user_home), - ("dir", "AppData/Local/Microsoft/Windows/INetCookies", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/WebCache/*.dat", from_user_home), - # IE - index.dat - ("file", "Cookies/index.dat", from_user_home), - ("file", "Local Settings/History/History.IE5/index.dat", from_user_home), - ("glob", "Local Settings/History/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "Local Settings/Temporary Internet Files/Content.IE5/index.dat", from_user_home), - ("file", "Local Settings/Application Data/Microsoft/Feeds Cache/index.dat", from_user_home), - ("file", 
"AppData/Local/Microsoft/Windows/History/History.IE5/index.dat", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/History/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/index.dat", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Content.IE5/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Low/Content.IE5/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/Cookies/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/Cookies/Low/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/IEDownloadHistory/index.dat", from_user_home), - # Firefox - Windows - ("glob", "AppData/Local/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - ("glob", "AppData/Roaming/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - ("glob", "Application Data/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - # Firefox - macOS - ("glob", "/Users/*/Library/Application Support/Firefox/Profiles/*/*.sqlite*"), - # Firefox - RHEL/Ubuntu - Flatpak - ("glob", ".var/app/org.mozilla.firefox/.mozilla/firefox/*/*.sqlite*", from_user_home), - # Firefox - RHEL/Ubuntu - DNF/apt - ("glob", ".mozilla/firefox/*/*.sqlite*", from_user_home), - # Firefox - RHEL/Ubuntu - snap - ("glob", "snap/firefox/common/.mozilla/firefox/*/*.sqlite*", from_user_home), - # Safari - macOS - ("file", "Library/Safari/Bookmarks.plist", from_user_home), - ("file", "Library/Safari/Downloads.plist", from_user_home), - ("file", "Library/Safari/Extensions/Extensions.plist", from_user_home), - ("glob", "Library/Safari/History.*", from_user_home), - ("file", "Library/Safari/LastSession.plist", from_user_home), - ("file", "Library/Caches/com.apple.Safari/Cache.db", from_user_home), - ] - - 
@classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - for root_dirs, extension_dirs, history_files in cls.COMMON_DIR_COMBINATIONS: - for root_dir, extension_dir, history_file in product(root_dirs, extension_dirs, history_files): - full_path = f"{root_dir}/{extension_dir}/{history_file}" - search_type = "glob" if "*" in full_path else "file" - - spec.add((search_type, full_path, from_user_home)) - - return spec - - -@register_module("--remoteaccess") -class RemoteAccess(Module): - DESC = "common remote access tools' log files" - SPEC = [ - # teamviewer - ("glob", "sysvol/Program Files/TeamViewer/*.log"), - ("glob", "sysvol/Program Files (x86)/TeamViewer/*.log"), - ("glob", "/var/log/teamviewer*/*.log"), - ("glob", "AppData/Roaming/TeamViewer/*.log", from_user_home), - ("glob", "Library/Logs/TeamViewer/*.log", from_user_home), - # anydesk - Windows - ("dir", "sysvol/ProgramData/AnyDesk"), - ("glob", "AppData/Roaming/AnyDesk/*.trace", from_user_home), - ("glob", "AppData/Roaming/AnyDesk/*/*.trace", from_user_home), - # anydesk - Mac + Linux - ("glob", ".anydesk*/*.trace", from_user_home), - ("file", "/var/log/anydesk.trace"), - # zoho - ("dir", "sysvol/ProgramData/ZohoMeeting/log"), - ("dir", "AppData/Local/ZohoMeeting/log", from_user_home), - # realvnc - ("file", "sysvol/ProgramData/RealVNC-Service/vncserver.log"), - ("file", "AppData/Local/RealVNC/vncserver.log", from_user_home), - # tightvnc - ("dir", "sysvol/ProgramData/TightVNC/Server/Logs"), - # Remote desktop cache files - ("dir", "AppData/Local/Microsoft/Terminal Server Client/Cache", from_user_home), - ] - - -@register_module("--webhosting") -class WebHosting(Module): - DESC = "Web hosting software log files" - SPEC = [ - # cPanel - ("dir", "/usr/local/cpanel/logs"), - ("file", ".lastlogin", from_user_home), - ] - - -@register_module("--wer") -class WER(Module): - DESC = "WER (Windows Error Reporting) related files" - - @classmethod - def 
get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - for wer_dir in itertools.chain( - ["sysvol/ProgramData/Microsoft/Windows/WER"], - from_user_home(target, "AppData/Local/Microsoft/Windows/WER"), - ): - for path in target.fs.path(wer_dir).rglob("*"): - if not path.is_file(): - continue - - if path.stat().st_size >= (1024 * 1024 * 1024): # 1GB - log.debug("Skipping WER file because it exceeds 1GB: %s", path) - continue - - spec.add(("file", path)) - - return spec - - -@register_module("--etc") -class Etc(Module): - SPEC = [ - # In OS-X /etc is a symlink to /private/etc. To prevent collecting - # duplicates, we only use the /etc directory here. - ("dir", "/etc"), - ("dir", "/usr/local/etc"), - ] - - -@register_module("--boot") -class Boot(Module): - SPEC = [ - ("glob", "/boot/config*"), - ("glob", "/boot/efi*"), - ("glob", "/boot/grub*"), - ("glob", "/boot/init*"), - ("glob", "/boot/system*"), - ] - - -def private_key_filter(path: fsutil.TargetPath) -> bool: - if path.is_file() and not path.is_symlink(): - with path.open("rt") as file: - return "PRIVATE KEY" in file.readline() - - -@register_module("--home") -class Home(Module): - SPEC = [ - # Catches most shell related configuration files - ("glob", ".*[akz]sh*", from_user_home), - ("glob", "*/.*[akz]sh*", from_user_home), - # Added to catch any shell related configuration file not caught with the above glob - ("glob", ".*history", from_user_home), - ("glob", "*/.*history", from_user_home), - ("glob", ".*rc", from_user_home), - ("glob", "*/.*rc", from_user_home), - ("glob", ".*_logout", from_user_home), - ("glob", "*/.*_logout", from_user_home), - # Miscellaneous configuration files - ("dir", ".config", from_user_home), - ("glob", "*/.config", from_user_home), - ("file", ".wget-hsts", from_user_home), - ("glob", "*/.wget-hsts", from_user_home), - ("file", ".gitconfig", from_user_home), - ("glob", "*/.gitconfig", from_user_home), - ("file", ".selected_editor", 
from_user_home), - ("glob", "*/.selected_editor", from_user_home), - ("file", ".viminfo", from_user_home), - ("glob", "*/.viminfo", from_user_home), - ("file", ".lesshist", from_user_home), - ("glob", "*/.lesshist", from_user_home), - ("file", ".profile", from_user_home), - ("glob", "*/.profile", from_user_home), - # OS-X home (aka /Users) - ("glob", ".bash_sessions/*", from_user_home), - ("glob", "Library/LaunchAgents/*", from_user_home), - ("glob", "Library/Logs/*", from_user_home), - ("glob", "Preferences/*", from_user_home), - ("glob", "Library/Preferences/*", from_user_home), - ] - - -@register_module("--ssh") -@module_arg("--private-keys", action=argparse.BooleanOptionalAction, help="Add any private keys") -class SSH(Module): - SPEC = [ - ("glob", ".ssh/*", from_user_home), - ("glob", "/etc/ssh/*"), - ("glob", "sysvol/ProgramData/ssh/*"), - ] - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - # Acquire SSH configuration in sshd directories - - filter = None if cli_args.private_keys else private_key_filter - - if filter: - log.info("Executing SSH without --private-keys, skipping private keys.") - - with collector.file_filter(filter): - super().run(target, cli_args, collector) - - -@register_module("--var") -class Var(Module): - SPEC = [ - # In OS-X /var is a symlink to /private/var. To prevent collecting - # duplicates, we only use the /var directory here. 
- ("dir", "/var/log"), - ("dir", "/var/spool/at"), - ("dir", "/var/spool/cron"), - ("dir", "/var/spool/anacron"), - ("dir", "/var/lib/dpkg/status"), - ("dir", "/var/lib/rpm"), - ("dir", "/var/db"), - ("dir", "/var/audit"), - ("dir", "/var/cron"), - ("dir", "/var/run"), - # some OS-X specific files - ("dir", "/private/var/at"), - ("dir", "/private/var/db/diagnostics"), - ("dir", "/private/var/db/uuidtext"), - ("file", "/private/var/vm/sleepimage"), - ("glob", "/private/var/vm/swapfile*"), - ("glob", "/private/var/folders/*/*/0/com.apple.notificationcenter/*/*"), - # user specific cron on OS-X - ("dir", "/usr/lib/cron"), - ] - - -@register_module("--bsd") -class BSD(Module): - SPEC = [ - ("file", "/bin/freebsd-version"), - ("dir", "/usr/ports"), - ] - - -@register_module("--osx") -class OSX(Module): - DESC = "OS-X specific files and directories" - SPEC = [ - # filesystem events - ("dir", "/.fseventsd"), - # kernel extensions - ("dir", "/Library/Extensions"), - ("dir", "/System/Library/Extensions"), - # logs - ("dir", "/Library/Logs"), - # autorun locations - ("dir", "/Library/LaunchAgents"), - ("dir", "/Library/LaunchDaemons"), - ("dir", "/Library/StartupItems"), - ("dir", "/System/Library/LaunchAgents"), - ("dir", "/System/Library/LaunchDaemons"), - ("dir", "/System/Library/StartupItems"), - # installed software - ("dir", "/Library/Receipts/InstallHistory.plist"), - ("file", "/System/Library/CoreServices/SystemVersion.plist"), - # system preferences - ("dir", "/Library/Preferences"), - # DHCP settings - ("dir", "/private/var/db/dhcpclient/leases"), - ] - - -@register_module("--osx-applications-info") -class OSXApplicationsInfo(Module): - DESC = "OS-X info.plist from all installed applications" - SPEC = [ - ("glob", "/Applications/*/Contents/Info.plist"), - ("glob", "Applications/*/Contents/Info.plist", from_user_home), - ] - - -@register_module("--bootbanks") -class Bootbanks(Module): - DESC = "ESXi bootbanks" - - @classmethod - def _run(cls, target: Target, 
cli_args: argparse.Namespace, collector: Collector) -> None: - # Both ESXi 6 and 7 compatible - boot_dirs = { - "boot": "BOOT", - "bootbank": "BOOTBANK1", - "altbootbank": "BOOTBANK2", - } - boot_fs = {} - - for boot_dir, boot_vol in boot_dirs.items(): - dir_path = target.fs.path(boot_dir) - if dir_path.is_symlink() and dir_path.exists(): - dst = dir_path.readlink() - fs = dst.get().top.fs - boot_fs[fs] = boot_vol - - for fs, mountpoint, uuid, _ in iter_esxi_filesystems(target): - if fs in boot_fs: - name = boot_fs[fs] - log.info("Acquiring %s (%s)", mountpoint, name) - mountpoint_len = len(mountpoint) - base = f"fs/{uuid}:{name}" - for path in target.fs.path(mountpoint).rglob("*"): - outpath = path.as_posix()[mountpoint_len:] - collector.collect_path(path, outpath=outpath, base=base) - - -@register_module("--esxi") -class ESXi(Module): - DESC = "ESXi interesting files" - SPEC = [ - ("dir", "/scratch/log"), - ("dir", "/locker/packages/var"), - # ESXi 7 - ("dir", "/scratch/cache"), - ("dir", "/scratch/vmkdump"), - # ESXi 6 - ("dir", "/scratch/vmware"), - ] - - -@register_module("--vmfs") -class VMFS(Module): - DESC = "ESXi VMFS metadata files" - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - for fs, mountpoint, uuid, name in iter_esxi_filesystems(target): - if not fs.__type__ == "vmfs": - continue - - log.info("Acquiring %s (%s)", mountpoint, name) - mountpoint_len = len(mountpoint) - base = f"fs/{uuid}:{name}" - for path in target.fs.path(mountpoint).glob("*.sf"): - outpath = path.as_posix()[mountpoint_len:] - collector.collect_path(path, outpath=outpath, base=base) - - -@register_module("--activities-cache") -class ActivitiesCache(Module): - DESC = "user's activities caches" - SPEC = [ - ("dir", "AppData/Local/ConnectedDevicesPlatform", from_user_home), - ] - - -@register_module("--hashes") -@module_arg( - "--hash-func", - action="append", - type=HashFunc, - choices=[h.value for h in HashFunc], - 
help="Hash function to use", -) -@module_arg("--dir-to-hash", action="append", help="Hash only files in a provided directory") -@module_arg("--ext-to-hash", action="append", help="Hash only files with the extensions provided") -@module_arg("--glob-to-hash", action="append", help="Hash only files that match provided glob") -class FileHashes(Module): - DESC = "file hashes" - - DEFAULT_HASH_FUNCS = (HashFunc.MD5, HashFunc.SHA1, HashFunc.SHA256) - DEFAULT_EXTENSIONS = ( - "bat", - "cmd", - "com", - "dll", - "exe", - "installlog", - "installutil", - "js", - "lnk", - "ps1", - "sys", - "tlb", - "vbs", - ) - DEFAULT_PATHS = ("sysvol/Windows/",) - - MAX_FILE_SIZE_BYTES = 100 * 1024 * 1024 # 100MB - - DEFAULT_FILE_FILTERS = ( - functools.partial(filter_out_by_path_match, re_pattern="^/(sysvol/)?Windows/WinSxS/"), - functools.partial(filter_out_huge_files, max_size_bytes=MAX_FILE_SIZE_BYTES), - functools.partial(filter_out_by_value_match, value=b"MZ", offsets=[0, 3]), - ) - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - log.info("*** Acquiring file hashes") - - specs = cls.get_specs(cli_args) - - with collector.bind_module(cls): - start = time.time() - - path_hashes = collect_hashes(target, specs, path_filters=cls.DEFAULT_FILE_FILTERS) - rows_count, csv_compressed_bytes = serialize_into_csv(path_hashes, compress=True) - - collector.write_bytes( - f"{collector.base}/{collector.METADATA_BASE}/file-hashes.csv.gz", - csv_compressed_bytes, - ) - log.info("Hashing is done, %s files processed in %.2f secs", rows_count, (time.time() - start)) - - @classmethod - def get_specs(cls, cli_args: argparse.Namespace) -> Iterator[tuple]: - path_selectors = [] - - if cli_args.ext_to_hash: - extensions = cli_args.ext_to_hash - else: - extensions = cls.DEFAULT_EXTENSIONS - - if cli_args.dir_to_hash or cli_args.glob_to_hash: - if cli_args.glob_to_hash: - path_selectors.extend([("glob", glob) for glob in cli_args.glob_to_hash]) - - if 
cli_args.dir_to_hash: - path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cli_args.dir_to_hash]) - - else: - path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cls.DEFAULT_PATHS]) - - if cli_args.hash_func: - hash_funcs = cli_args.hash_func - else: - hash_funcs = cls.DEFAULT_HASH_FUNCS - - return [(path_selector, hash_funcs) for path_selector in path_selectors] - - -@register_module("--handles") -@module_arg( - "--handle-types", - action="extend", - help="Collect only specified handle types", - type=NamedObjectType, - choices=[h.value for h in NamedObjectType], - nargs="*", -) -@local_module -class OpenHandles(Module): - DESC = "Open handles" - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - if not sys.platform == "win32": - log.error("Open Handles plugin can only run on Windows systems! Skipping...") - return - - from acquire.dynamic.windows.collect import collect_open_handles - from acquire.dynamic.windows.handles import serialize_handles_into_csv - - log.info("*** Acquiring open handles") - - handle_types = cli_args.handle_types - - with collector.bind_module(cls): - handles = collect_open_handles(handle_types) - csv_compressed_handles = serialize_handles_into_csv(handles) - - collector.write_bytes( - f"{collector.base}/{collector.METADATA_BASE}/open_handles.csv.gz", - csv_compressed_handles, - ) - log.info("Collecting open handles is done.") - - -def print_disks_overview(target: Target) -> None: - log.info("// Disks") - try: - for disk in target.disks: - log.info("%s", disk) - if not disk.vs: - continue - - for volume in disk.vs.volumes: - log.info("- %s", volume) - except Exception: - log.error("Failed to iterate disks") - log.info("") - - -def print_volumes_overview(target: Target) -> None: - log.info("// Volumes") - try: - for volume in target.volumes: - log.info("%s", volume) - except Exception: - log.error("Failed to iterate volumes") - log.info("") - - 
-def print_acquire_warning(target: Target) -> None: - if target.os != "windows": - log.warning("========================================== WARNING ==========================================") - log.warning("") - log.warning( - "The support for operating system '%s' is experimental. Some artifacts may not yet be included and some ", - target.os, - ) - log.warning("features may not work as expected. Please notify upstream for any missing artifacts or features.") - log.warning("") - log.warning("========================================== WARNING ==========================================") - - -def _add_modules_for_profile(choice: str, operating_system: str, profile: dict, msg: str) -> Optional[dict]: - modules_selected = dict() - - if choice and choice != "none": - profile_dict = profile[choice] - if operating_system not in profile_dict: - log.error(msg, operating_system, choice) - return None - - for mod in profile_dict[operating_system]: - modules_selected[mod.__modname__] = mod - - return modules_selected - - -def acquire_target(target: Target, args: argparse.Namespace, output_ts: Optional[str] = None) -> list[str | Path]: - acquire_gui = GUI() - files = [] - output_ts = output_ts or get_utc_now_str() - if args.log_to_dir: - log_file = args.log_path.joinpath(format_output_name("Unknown", output_ts, "log")) - # This will also rename the log file on disk, which was opened in main(), if the name is different - reconfigure_log_file(log, log_file, delay=True) - else: - log_file = args.log_path - - skip_list = set() - if log_file: - files.append(log_file) - if target.path.name == "local": - skip_list.add(normalize_path(target, log_file, resolve_parents=True, preserve_case=False)) - - print_disks_overview(target) - print_volumes_overview(target) - - if not target._os_plugin: - log.error("Error: Unable to detect OS") - return files - - hostname = "" - try: - hostname = target.hostname - except Exception: - log.exception("Failed to get hostname") - - version = None - try: 
- version = target.version - except Exception: - log.exception("Failed to detect OS version") - - if version is None: - os_plugin_name = target._os_plugin.__name__.lower() - version = f"{target.os} ({os_plugin_name})" - - log.info("Target name: %s", target.name) - log.info("Hostname: %s", hostname) - log.info("OS: %s", version) - log.info("") - - print_acquire_warning(target) - - modules_selected = {} - modules_successful = [] - modules_failed = {} - for name, mod in MODULES.items(): - name_slug = name.lower() - # check if module was set in the arguments provided - if getattr(args, name_slug): - modules_selected[name] = mod - - profile = args.profile - - # Set profile to default if no profile, modules, files, directories or globes were selected - if not profile and not modules_selected and not args.file and not args.directory and not args.glob: - log.info("Using default collection profile") - profile = "default" - log.info("") - - profile_modules = _add_modules_for_profile( - profile, target.os, PROFILES, "No collection set for OS %s with profile %s" - ) - - if not (volatile_profile := args.volatile_profile): - volatile_profile = "none" - - volatile_modules = _add_modules_for_profile( - volatile_profile, target.os, VOLATILE, "No collection set for OS %s with volatile profile %s" - ) - - if (profile_modules or volatile_modules) is None: - return files - - modules_selected.update(profile_modules) - modules_selected.update(volatile_modules) - - log.info("Modules selected: %s", ", ".join(sorted(modules_selected))) - - local_only_modules = {name: module for name, module in modules_selected.items() if hasattr(module, "__local__")} - if target.path.name != "local" and local_only_modules: - for name, module in local_only_modules.items(): - modules_failed[module.__name__] = "Not running on a local target" - log.error( - "Can not use local-only modules with non-local targets. 
Skipping: %s", - " ".join(sorted(local_only_modules.keys())), - ) - log.info("") - # Remove local-only modules from the modules list - modules_selected = dict(modules_selected.items() - local_only_modules.items()) - - log_file_handler = get_file_handler(log) - # Prepare log file and output file names - if log_file_handler and args.log_to_dir: - log_file = format_output_name(target.name, output_ts, "log") - # This will also rename the log file on disk, which was opened and written previously. - log_file_handler.set_filename(log_file) - log_path = Path(log_file_handler.baseFilename).resolve() - log.info("Logging to file %s", log_path) - files = [log_file_handler.baseFilename] - if target.path.name == "local": - skip_list = {normalize_path(target, log_path, resolve_parents=True, preserve_case=False)} - - output_path = args.output or args.output_file - if output_path.is_dir(): - output_dir = format_output_name(target.name, output_ts) - output_path = output_path.joinpath(output_dir) - output_path = output_path.resolve() - - output = OUTPUTS[args.output_type]( - output_path, - compress=args.compress, - compression_method=args.compress_method, - encrypt=args.encrypt, - public_key=args.public_key, - ) - files.append(output.path) - if target.path.name == "local": - skip_list.add(normalize_path(target, output.path, resolve_parents=True, preserve_case=False)) - - log.info("Writing output to %s", output.path) - if skip_list: - log.info("Skipping own files: %s", ", ".join(skip_list)) - log.info("") - - dir_base = "fs" - if target.os != "windows": - dir_base = "fs/$rootfs$" - - with Collector(target, output, base=dir_base, skip_list=skip_list) as collector: - # Acquire specified files - if args.file or args.directory or args.glob: - log.info("*** Acquiring specified paths") - spec = [] - - if args.file: - for path in args.file: - spec.append(("file", path.strip())) - - if args.directory: - for path in args.directory: - spec.append(("dir", path.strip())) - - if args.glob: - for 
path in args.glob: - spec.append(("glob", path.strip())) - - collector.collect(spec, module_name=CLI_ARGS_MODULE) - modules_successful.append(CLI_ARGS_MODULE) - log.info("") - - # Run modules (sort first based on execution order) - modules_selected = sorted(modules_selected.items(), key=lambda module: module[1].EXEC_ORDER) - count = 0 - for name, mod in modules_selected: - try: - mod.run(target, args, collector) - - modules_successful.append(mod.__name__) - except Exception: - log.error("Error while running module %s", name, exc_info=True) - modules_failed[mod.__name__] = get_formatted_exception() - - acquire_gui.progress = (acquire_gui.shard // len(modules_selected)) * count - count += 1 - - log.info("") - - collection_report = collector.report - - log.info("Done collecting artifacts:") - - # prepare and render full report only if logging level is more permissive than INFO - if log.level < logging.INFO: - log.debug(get_full_formatted_report(collection_report)) - - log.info(get_report_summary(collection_report)) - - if not args.disable_report: - collection_report_serialized = collection_report.get_records_per_module_per_outcome(serialize_records=True) - - execution_report = { - "target": str(target), - "name": target.name, - "timestamp": get_utc_now().isoformat(), - "modules-successful": modules_successful, - "modules-failed": modules_failed, - **collection_report_serialized, - } - - if args.output: - report_file_name = format_output_name(target.name, postfix=output_ts, ext="report.json") - else: - report_file_name = f"{output_path.name}.report.json" - - report_file_path = output_path.parent / report_file_name - persist_execution_report(report_file_path, execution_report) - - files.append(report_file_path) - log.info("Acquisition report for %s is written to %s", target, report_file_path) - - log.info("Output: %s", output.path) - return files - - -def upload_files(paths: list[str | Path], upload_plugin: UploaderPlugin, no_proxy: bool = False) -> None: - proxies = 
None if no_proxy else urllib.request.getproxies() - log.debug("Proxies: %s (no_proxy = %s)", proxies, no_proxy) - - log.info('Uploading files: "%s"', " ".join(map(str, paths))) - try: - upload_files_using_uploader(upload_plugin, paths, proxies) - except Exception: - log.error('Upload FAILED for files: "%s". See log file for details.', " ".join(map(str, paths))) - raise - else: - log.info("Upload succeeded.") - - -class WindowsProfile: - MINIMAL = [ - NTFS, - EventLogs, - Registry, - Tasks, - PowerShell, - Prefetch, - Appcompat, - PCA, - Misc, - Startup, - ] - DEFAULT = [ - *MINIMAL, - ETL, - Recents, - RecycleBin, - Drivers, - Syscache, - WBEM, - AV, - BITS, - DHCP, - DNS, - ActiveDirectory, - RemoteAccess, - ActivitiesCache, - ] - FULL = [ - *DEFAULT, - History, - NTDS, - QuarantinedFiles, - WindowsNotifications, - SSH, - IIS, - TextEditor, - ] - - -class LinuxProfile: - MINIMAL = [ - Etc, - Boot, - Home, - SSH, - Var, - ] - DEFAULT = MINIMAL - FULL = [ - *DEFAULT, - History, - WebHosting, - ] - - -class BsdProfile: - MINIMAL = [ - Etc, - Boot, - Home, - SSH, - Var, - BSD, - ] - DEFAULT = MINIMAL - FULL = MINIMAL - - -class ESXiProfile: - MINIMAL = [ - Bootbanks, - ESXi, - SSH, - ] - DEFAULT = [ - *MINIMAL, - VMFS, - ] - FULL = DEFAULT - - -class OSXProfile: - MINIMAL = [ - Etc, - Home, - Var, - OSX, - OSXApplicationsInfo, - ] - DEFAULT = MINIMAL - FULL = [ - *DEFAULT, - History, - SSH, - ] - - -PROFILES = { - "full": { - "windows": WindowsProfile.FULL, - "linux": LinuxProfile.FULL, - "bsd": BsdProfile.FULL, - "esxi": ESXiProfile.FULL, - "osx": OSXProfile.FULL, - }, - "default": { - "windows": WindowsProfile.DEFAULT, - "linux": LinuxProfile.DEFAULT, - "bsd": BsdProfile.DEFAULT, - "esxi": ESXiProfile.DEFAULT, - "osx": OSXProfile.DEFAULT, - }, - "minimal": { - "windows": WindowsProfile.MINIMAL, - "linux": LinuxProfile.MINIMAL, - "bsd": BsdProfile.MINIMAL, - "esxi": ESXiProfile.MINIMAL, - "osx": OSXProfile.MINIMAL, - }, - "none": None, -} - - -class VolatileProfile: 
- DEFAULT = [ - Netstat, - WinProcesses, - WinProcEnv, - WinArpCache, - WinRDPSessions, - WinDnsClientCache, - ] - EXTENSIVE = [ - Proc, - Sys, - ] - - -VOLATILE = { - "default": { - "windows": VolatileProfile.DEFAULT, - "linux": [], - "bsd": [], - "esxi": [], - "osx": [], - }, - "extensive": { - "windows": VolatileProfile.DEFAULT, - "linux": VolatileProfile.EXTENSIVE, - "bsd": VolatileProfile.EXTENSIVE, - "esxi": VolatileProfile.EXTENSIVE, - "osx": [], - }, - "none": None, -} - - -def exit_success(default_args: list[str]): - log.info("Acquire finished successful") - log.info("Arguments: %s", " ".join(sys.argv[1:])) - log.info("Default Arguments: %s", " ".join(default_args)) - log.info("Exiting with status code 0 (SUCCESS)") - sys.exit(0) - - -def exit_failure(default_args: list[str]): - log.error("Acquire FAILED") - log.error("Arguments: %s", " ".join(sys.argv[1:])) - log.error("Default Arguments: %s", " ".join(default_args)) - log.error("Exiting with status code 1 (FAILURE)") - sys.exit(1) - - -def main() -> None: - parser = create_argument_parser(PROFILES, VOLATILE, MODULES) - args, rest = parse_acquire_args(parser, config=CONFIG) - - # Since output has a default value, set it to None when output_file is defined - if args.output_file: - args.output = None - - try: - check_and_set_log_args(args) - except ValueError as err: - parser.exit(err) - - if args.log_to_dir: - # When args.upload files are specified, only these files are uploaded - # and no other action is done. 
Thus a log file specifically named - # Upload_.log is created - file_prefix = "Upload" if args.upload else "Unknown" - log_file = args.log_path.joinpath(format_output_name(file_prefix, args.start_time, "log")) - else: - log_file = args.log_path - - setup_logging(log, log_file, args.verbose, delay=args.log_delay) - - acquire_successful = True - files_to_upload = [log_file] - acquire_gui = None - try: - log.info(ACQUIRE_BANNER) - log.info("User: %s | Admin: %s", get_user_name(), is_user_admin()) - log.info("Arguments: %s", " ".join(sys.argv[1:])) - log.info("Default Arguments: %s", " ".join(args.config.get("arguments"))) - log.info("") - - # start GUI if requested through CLI / config - flavour = None - if args.gui == "always" or ( - args.gui == "depends" and os.environ.get("PYS_KEYSOURCE") == "prompt" and len(sys.argv) == 1 - ): - flavour = platform.system() - acquire_gui = GUI(flavour=flavour, upload_available=args.auto_upload) - - args.output, args.auto_upload, cancel = acquire_gui.wait_for_start(args) - if cancel: - log.info("Acquire cancelled") - exit_success(args.config.get("arguments")) - # From here onwards, the GUI will be locked and cannot be closed because we're acquiring - - plugins_to_load = [("cloud", MinIO)] - upload_plugins = UploaderRegistry("acquire.plugins", plugins_to_load) - - check_and_set_acquire_args(args, upload_plugins) - - if args.upload: - try: - upload_files(args.upload, args.upload_plugin, args.no_proxy) - except Exception as err: - acquire_gui.message("Failed to upload files") - log.exception(err) - exit_failure(args.config.get("arguments")) - exit_success(args.config.get("arguments")) - - target_paths = [] - for target_path in args.targets: - target_path = args_to_uri([target_path], args.loader, rest)[0] if args.loader else target_path - if target_path == "local": - target_query = {} - if args.force_fallback: - target_query.update({"force-directory-fs": 1}) - - if args.fallback: - target_query.update({"fallback-to-directory-fs": 1}) - 
- target_query = urllib.parse.urlencode(target_query) - target_path = f"{target_path}?{target_query}" - target_paths.append(target_path) - - try: - target_name = "Unknown" # just in case open_all already fails - for target in Target.open_all(target_paths): - target_name = "Unknown" # overwrite previous target name - target_name = target.name - log.info("Loading target %s", target_name) - log.info(target) - if target.os == "esxi" and target.name == "local": - # Loader found that we are running on an esxi host - # Perform operations to "enhance" memory - with esxi_memory_context_manager(): - files_to_upload = acquire_children_and_targets(target, args) - else: - files_to_upload = acquire_children_and_targets(target, args) - except Exception: - log.error("Failed to acquire target: %s", target_name) - if not is_user_admin(): - log.error("Try re-running as administrator/root") - acquire_gui.message("This application must be run as administrator.") - raise - - files_to_upload = sort_files(files_to_upload) - - except Exception as err: - log.error("Acquiring artifacts FAILED") - log.exception(err) - acquire_successful = False - else: - log.info("Acquiring artifacts succeeded") - - try: - # The auto-upload of files is done at the very very end to make sure any - # logged exceptions are written to the log file before uploading. - # This means that any failures from this point on will not be part of the - # uploaded log files, they will be written to the logfile on disk though. 
- if args.auto_upload and args.upload_plugin and files_to_upload: - try: - log_file_handler = get_file_handler(log) - if log_file_handler: - log_file_handler.close() - - upload_files(files_to_upload, args.upload_plugin) - except Exception: - if acquire_gui: - acquire_gui.message("Failed to upload files") - raise - - if acquire_gui: - acquire_gui.finish() - acquire_gui.wait_for_quit() - - except Exception as err: - acquire_successful = False - log.exception(err) - - if acquire_successful: - exit_success(args.config.get("arguments")) - else: - exit_failure(args.config.get("arguments")) - - -def load_child(target: Target, child_path: Path) -> None: - log.info("") - log.info("Loading child target %s", child_path) - try: - child = target.open_child(child_path) - log.info(target) - except Exception: - log.exception("Failed to load child target") - raise - - return child - - -def acquire_children_and_targets(target: Target, args: argparse.Namespace) -> list[str | Path]: - if args.child: - target = load_child(target, args.child) - - log.info("") - - files = [] - acquire_gui = GUI() - - counter = 0 - progress_limit = 50 if args.auto_upload else 90 - total_targets = 0 - if args.children: - total_targets += len(list(target.list_children())) - - if (args.children and not args.skip_parent) or not args.children: - total_targets += 1 - counter += 1 - acquire_gui.shard = int((progress_limit / total_targets) * counter) - try: - files.extend(acquire_target(target, args, args.start_time)) - - except Exception: - log.error("Failed to acquire main target") - acquire_gui.message("Failed to acquire target") - acquire_gui.wait_for_quit() - raise - - if args.children: - for child in target.list_children(): - counter += 1 - acquire_gui.shard = int((progress_limit / total_targets) * counter) - try: - child_target = load_child(target, child.path) - except Exception: - continue - - log.info("") - - try: - child_files = acquire_target(child_target, args) - files.extend(child_files) - except 
Exception: - log.exception("Failed to acquire child target %s", child_target.name) - acquire_gui.message("Failed to acquire child target") - continue - - return files - - -def sort_files(files: list[Union[str, Path]]) -> list[Path]: - log_files: list[Path] = [] - tar_paths: list[Path] = [] - report_paths: list[Path] = [] - - suffix_map = {".log": log_files, ".json": report_paths} - - for file in files: - if isinstance(file, str): - file = Path(file) - - suffix_map.get(file.suffix, tar_paths).append(file) - - # Reverse log paths, as the first one in ``files`` is the main one. - log_files.reverse() - - return tar_paths + report_paths + log_files - - -if __name__ == "__main__": - try: - main() - except KeyboardInterrupt: - sys.exit(1) - except Exception: - sys.exit(1) +from __future__ import annotations + +import argparse +import enum +import functools +import io +import itertools +import logging +import os +import platform +import shutil +import subprocess +import sys +import time +import urllib.parse +import urllib.request +from collections import defaultdict, namedtuple +from itertools import product +from pathlib import Path +from typing import BinaryIO, Callable, Iterator, Optional, Union + +from dissect.target import Target +from dissect.target.filesystem import Filesystem +from dissect.target.filesystems import ntfs +from dissect.target.helpers import fsutil +from dissect.target.plugins.apps.webserver import iis +from dissect.target.plugins.os.windows.log import evt, evtx +from dissect.target.tools.utils import args_to_uri +from dissect.util.stream import RunlistStream + +from acquire.collector import Collector, get_full_formatted_report, get_report_summary +from acquire.dynamic.windows.named_objects import NamedObjectType +from acquire.dynamic.windows.arp import ( + NetAdapter, + NetNeighbor, + get_windows_network_adapters, + get_windows_net_neighbors, + format_net_neighbors_list +) +from acquire.dynamic.windows.netstat import ( + NetConnection, + 
get_active_connections, + format_net_connections_list +) +from acquire.esxi import esxi_memory_context_manager +from acquire.gui import GUI +from acquire.hashes import ( + HashFunc, + collect_hashes, + filter_out_by_path_match, + filter_out_by_value_match, + filter_out_huge_files, + serialize_into_csv, +) +from acquire.log import get_file_handler, reconfigure_log_file, setup_logging +from acquire.outputs import OUTPUTS +from acquire.uploaders.minio import MinIO +from acquire.uploaders.plugin import UploaderPlugin, upload_files_using_uploader +from acquire.uploaders.plugin_registry import UploaderRegistry +from acquire.utils import ( + check_and_set_acquire_args, + check_and_set_log_args, + create_argument_parser, + format_output_name, + get_formatted_exception, + get_user_name, + get_utc_now, + get_utc_now_str, + is_user_admin, + normalize_path, + parse_acquire_args, + persist_execution_report, +) + +try: + from acquire.version import version +except ImportError: + version = "0.0.dev" + +try: + # Injected by pystandalone builder + from acquire.config import CONFIG +except ImportError: + CONFIG = defaultdict(lambda: None) + + +VERSION = version +ACQUIRE_BANNER = r""" + _ + __ _ ___ __ _ _ _(_)_ __ ___ + / _` |/ __/ _` | | | | | '__/ _ \ +| (_| | (_| (_| | |_| | | | | __/ + \__,_|\___\__, |\__,_|_|_| \___| + by Fox-IT |_| v{} + part of NCC Group +""".format( + VERSION +)[ + 1: +] + +MODULES = {} +MODULE_LOOKUP = {} + +CLI_ARGS_MODULE = "cli-args" + +log = logging.getLogger("acquire") +log.propagate = 0 +log_file_handler = None +logging.lastResort = None +logging.raiseExceptions = False + + +def misc_windows_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + misc_dirs = { + ("Windows/ServiceProfiles/LocalService", False), + ("Windows/ServiceProfiles/NetworkService", False), + ("Windows/System32/config/systemprofile", False), + ("Users", True), + ("Documents and Settings", True), + } + + for fs in target.fs.path().iterdir(): + if fs.name.lower() == "c:": + 
continue + + for misc_dir, get_subdirs in misc_dirs: + misc_path = fs.joinpath(misc_dir) + + if not misc_path.exists(): + continue + + if get_subdirs: + for entry in misc_path.iterdir(): + if entry.is_dir(): + yield entry + else: + yield misc_path + + +def misc_unix_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + user_dirs = ["root", "home/*"] + + home_dirs = (target.fs.path("/").glob(path) for path in user_dirs) + for home_dir in itertools.chain.from_iterable(home_dirs): + yield home_dir + + +def misc_osx_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + for homedir in itertools.chain(target.fs.path("/Users/").glob("*"), misc_unix_user_homes(target)): + yield homedir + + +MISC_MAPPING = { + "osx": misc_osx_user_homes, + "windows": misc_windows_user_homes, +} + + +def from_user_home(target: Target, path: str) -> Iterator[str]: + try: + for user_details in target.user_details.all_with_home(): + yield user_details.home_path.joinpath(path).as_posix() + except Exception as e: + log.warning("Error occurred when requesting all user homes") + log.debug("", exc_info=e) + + misc_user_homes = MISC_MAPPING.get(target.os, misc_unix_user_homes) + for user_dir in misc_user_homes(target): + yield user_dir.joinpath(path).as_posix() + + +def iter_ntfs_filesystems(target: Target) -> Iterator[tuple[ntfs.NtfsFilesystem, Optional[str], str, str]]: + mount_lookup = defaultdict(list) + for mount, fs in target.fs.mounts.items(): + mount_lookup[fs].append(mount) + + for fs in target.filesystems: + # The attr check is needed to correctly collect fake NTFS filesystems + # where the MFT etc. are added to a VirtualFilesystem. This happens for + # instance when the target is an acquired tar target. 
+ if not isinstance(fs, ntfs.NtfsFilesystem) and not hasattr(fs, "ntfs"): + log.warning("Skipping %s - not an NTFS filesystem", fs) + continue + + if fs in mount_lookup: + mountpoints = mount_lookup[fs] + + for main_mountpoint in mountpoints: + if main_mountpoint != "sysvol": + break + + name = main_mountpoint + mountpoints = ", ".join(mountpoints) + else: + main_mountpoint = None + name = f"vol-{fs.ntfs.serial:x}" + mountpoints = "No mounts" + log.warning("Unmounted NTFS filesystem found %s (%s)", fs, name) + + yield fs, main_mountpoint, name, mountpoints + + +def iter_esxi_filesystems(target: Target) -> Iterator[tuple[Filesystem, str, str, Optional[str]]]: + for mount, fs in target.fs.mounts.items(): + if not mount.startswith("/vmfs/volumes/"): + continue + + uuid = mount[len("/vmfs/volumes/") :] # strip /vmfs/volumes/ + name = None + if fs.__type__ == "fat": + name = fs.volume.name + elif fs.__type__ == "vmfs": + name = fs.vmfs.label + + yield fs, mount, uuid, name + + +def register_module(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: + def wrapper(module_cls: type[Module]) -> type[Module]: + name = module_cls.__name__ + + if name in MODULES: + raise ValueError( + f"Module name is already registered: registration for {module_cls} conflicts with {MODULES[name]}" + ) + + desc = module_cls.DESC or name + kwargs["help"] = f"acquire {desc}" + kwargs["action"] = argparse.BooleanOptionalAction + kwargs["dest"] = name.lower() + module_cls.__modname__ = name + + if not hasattr(module_cls, "__cli_args__"): + module_cls.__cli_args__ = [] + module_cls.__cli_args__.append((args, kwargs)) + + MODULES[name] = module_cls + return module_cls + + return wrapper + + +def module_arg(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: + def wrapper(module_cls: type[Module]) -> type[Module]: + if not hasattr(module_cls, "__cli_args__"): + module_cls.__cli_args__ = [] + module_cls.__cli_args__.append((args, kwargs)) + return module_cls + + return wrapper + + +def 
local_module(cls: type[object]) -> object: + """A decorator that sets property `__local__` on a module class to mark it for local target only""" + cls.__local__ = True + return cls + + +class ExecutionOrder(enum.IntEnum): + TOP = 0 + DEFAULT = 1 + BOTTOM = 2 + + +class Module: + DESC = None + SPEC = [] + EXEC_ORDER = ExecutionOrder.DEFAULT + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + desc = cls.DESC or cls.__name__.lower() + log.info("*** Acquiring %s", desc) + + with collector.bind_module(cls): + collector.collect(cls.SPEC) + + spec_ext = cls.get_spec_additions(target, cli_args) + if spec_ext: + collector.collect(list(spec_ext)) + + cls._run(target, cli_args, collector) + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + pass + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + pass + + +@register_module("--sys") +@local_module +class Sys(Module): + DESC = "Sysfs files (live systems only)" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + spec = [("dir", "/sys")] + collector.collect(spec, follow=False, volatile=True) + + +@register_module("--proc") +@local_module +class Proc(Module): + DESC = "Procfs files (live systems only)" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + spec = [("dir", "/proc")] + collector.collect(spec, follow=False, volatile=True) + + +@register_module("-n", "--ntfs") +class NTFS(Module): + DESC = "NTFS filesystem metadata" + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): + log.info("Acquiring from %s as %s (%s)", fs, name, 
mountpoints) + + for filename in ("$MFT", "$Boot", "$Secure:$SDS"): + if main_mountpoint is not None: + path = fsutil.join(main_mountpoint, filename) + collector.collect_path(path) + + else: + # In case the NTFS filesystem is not mounted, which should not occur but + # iter_ntfs_filesystems allows for the possibility, we fall back to raw file + # collection. + collector.collect_file_raw(filename, fs, name) + + cls.collect_usnjrnl(collector, fs, name) + + @classmethod + def collect_usnjrnl(cls, collector: Collector, fs: Filesystem, name: str) -> None: + def usnjrnl_accessor(journal: BinaryIO) -> tuple[BinaryIO, int]: + # If the filesystem is a virtual NTFS filesystem, journal will be + # plain BinaryIO, not a RunlistStream. + if isinstance(journal, RunlistStream): + i = 0 + while journal.runlist[i][0] is None: + journal.seek(journal.runlist[i][1] * journal.block_size, io.SEEK_CUR) + i += 1 + size = journal.size - journal.tell() + else: + size = journal.size + + return (journal, size) + + collector.collect_file_raw( + "$Extend/$Usnjrnl:$J", + fs, + name, + file_accessor=usnjrnl_accessor, + ) + + +@register_module("-r", "--registry") +class Registry(Module): + DESC = "registry hives" + HIVES = ["drivers", "sam", "security", "software", "system", "default"] + SPEC = [ + ("dir", "sysvol/windows/system32/config/txr"), + ("dir", "sysvol/windows/system32/config/regback"), + ("glob", "sysvol/System Volume Information/_restore*/RP*/snapshot/_REGISTRY_*"), + ("glob", "ntuser.dat*", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/UsrClass.dat*", from_user_home), + ("glob", "Local Settings/Application Data/Microsoft/Windows/UsrClass.dat*", from_user_home), + ] + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + # Glob all hives to include e.g. .LOG files and .regtrans-ms files. 
+ files = [] + for hive in cls.HIVES: + pattern = "sysvol/windows/system32/config/{}*".format(hive) + for entry in target.fs.path().glob(pattern): + if entry.is_file(): + files.append(("file", entry)) + return files + + +@register_module("--netstat") +@local_module +class Netstat(Module): + DESC = "Windows network connections" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + net_connections: list[NetConnection] = get_active_connections() + output = format_net_connections_list(net_connections) + + output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + full_output_path = fsutil.join(output_base, "netstat") + + collector.output.write_bytes(full_output_path, output.encode()) + collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"]) + + +@register_module("--win-processes") +@local_module +class WinProcesses(Module): + DESC = "Windows process list" + SPEC = [ + ("command", (["tasklist", "/V", "/fo", "csv"], "win-processes")), + ] + EXEC_ORDER = ExecutionOrder.BOTTOM + + +@register_module("--win-proc-env") +@local_module +class WinProcEnv(Module): + DESC = "Process environment variables" + SPEC = [ + ( + "command", + ( + ["PowerShell", "-command", "Get-Process | ForEach-Object {$_.StartInfo.EnvironmentVariables}"], + "win-process-env-vars", + ), + ), + ] + EXEC_ORDER = ExecutionOrder.BOTTOM + + +@register_module("--win-arp-cache") +@local_module +class WinArpCache(Module): + DESC = "ARP Cache" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + network_adapters: list[NetAdapter] = get_windows_network_adapters() + neighbors = get_windows_net_neighbors(network_adapters) + output = format_net_neighbors_list(neighbors) + + output_base = fsutil.join(collector.base, 
collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + full_output_path = fsutil.join(output_base, "arp-cache") + + collector.output.write_bytes(full_output_path, output.encode()) + collector.report.add_command_collected(cls.__name__, ["arp-cache"]) + + +@register_module("--win-rdp-sessions") +@local_module +class WinRDPSessions(Module): + DESC = "Windows Remote Desktop session information" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + # where.exe instead of where, just in case the client runs in PS instead of CMD + # by default where hides qwinsta on 32-bit systems because qwinsta is only 64-bit, but with recursive /R search + # we can still manage to find it and by passing the exact path Windows will launch a 64-bit process + # on systems capable of doing that. + qwinsta = subprocess.run( + ["where.exe", "/R", os.environ["WINDIR"], "qwinsta.exe"], capture_output=True, text=True + ).stdout.split("\n")[0] + return [ + ("command", ([qwinsta, "/VM"], "win-rdp-sessions")), + ] + + +@register_module("--winpmem") +@local_module +class WinMemDump(Module): + DESC = "Windows full memory dump" + EXEC_ORDER = ExecutionOrder.BOTTOM + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + winpmem_file_name = "winpmem.exe" + winpmem_exec = shutil.which(winpmem_file_name) + + command_parts = [winpmem_exec, "-"] + + if winpmem_exec is None: + command_parts.pop(0) + command_parts.insert(0, winpmem_file_name) + collector.report.add_command_failed(cls.__name__, command_parts) + log.error( + "- Failed to collect output from command `%s`, program %s not found", + " ".join(command_parts), + winpmem_file_name, + ) + return + + else: + log.info("- Collecting output from command `%s`", " ".join(command_parts)) + + mem_dump_path = collector.output.path.with_name("winpmem") + mem_dump_errors_path = 
mem_dump_path.with_name("winpmem.errors") + + output_base = collector.COMMAND_OUTPUT_BASE + if collector.base: + output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) + + mem_dump_output_path = fsutil.join(output_base, mem_dump_path.name) + mem_dump_errors_output_path = fsutil.join(output_base, mem_dump_errors_path.name) + + with mem_dump_path.open(mode="wb") as mem_dump_fh: + with mem_dump_errors_path.open(mode="wb") as mem_dump_errors_fh: + try: + # The shell parameter must be set to False, as otherwise the + # output from stdout is not piped into the filehandle. + # The check parameter must be set to False, as winpmem.exe + # always seems to exit with an error code, even on success. + subprocess.run( + bufsize=0, + args=command_parts, + stdout=mem_dump_fh, + stderr=mem_dump_errors_fh, + shell=False, + check=False, + ) + + except Exception: + collector.report.add_command_failed(cls.__name__, command_parts) + log.error( + "- Failed to collect output from command `%s`", + " ".join(command_parts), + exc_info=True, + ) + return + + collector.output.write_entry(mem_dump_output_path, mem_dump_path) + collector.output.write_entry(mem_dump_errors_output_path, mem_dump_errors_path) + collector.report.add_command_collected(cls.__name__, command_parts) + mem_dump_path.unlink() + mem_dump_errors_path.unlink() + + +@register_module("--winmem-files") +class WinMemFiles(Module): + DESC = "Windows memory files" + SPEC = [ + ("file", "sysvol/pagefile.sys"), + ("file", "sysvol/hiberfil.sys"), + ("file", "sysvol/swapfile.sys"), + ("file", "sysvol/windows/memory.dmp"), + ("dir", "sysvol/windows/minidump"), + ] + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + page_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Memory Management" + for reg_key in target.registry.iterkeys(page_key): + for page_path in reg_key.value("ExistingPageFiles").value: + spec.add(("file", 
target.resolve(page_path))) + + crash_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\CrashControl" + for reg_key in target.registry.iterkeys(crash_key): + spec.add(("file", target.resolve(reg_key.value("DumpFile").value))) + spec.add(("dir", target.resolve(reg_key.value("MinidumpDir").value))) + + return spec + + +@register_module("-e", "--eventlogs") +class EventLogs(Module): + DESC = "event logs" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + evt_log_paths = evt.EvtPlugin(target).get_logs(filename_glob="*.evt") + for path in evt_log_paths: + spec.add(("file", path)) + evtx_log_paths = evtx.EvtxPlugin(target).get_logs(filename_glob="*.evtx") + for path in evtx_log_paths: + spec.add(("file", path)) + return spec + + +@register_module("-t", "--tasks") +class Tasks(Module): + SPEC = [ + ("dir", "sysvol/windows/tasks"), + ("dir", "sysvol/windows/system32/tasks"), + ("dir", "sysvol/windows/syswow64/tasks"), + ("dir", "sysvol/windows/sysvol/domain/policies"), + ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), + # Task Scheduler Service transaction log + ("file", "sysvol/SchedLgU.txt"), + ("file", "sysvol/windows/SchedLgU.txt"), + ("file", "sysvol/windows/tasks/SchedLgU.txt"), + ("file", "sysvol/winnt/tasks/SchedLgU.txt"), + ] + + +@register_module("-ad", "--active-directory") +class ActiveDirectory(Module): + DESC = "Active Directory data (policies, scripts, etc.)" + SPEC = [ + ("dir", "sysvol/windows/sysvol/domain"), + ] + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters" + for reg_key in target.registry.iterkeys(key): + try: + spec.add(("dir", reg_key.value("SysVol").value)) + except Exception: + pass + return spec + + +@register_module("-nt", "--ntds") +class NTDS(Module): + SPEC = [ + ("dir", "sysvol/windows/NTDS"), + ] + + 
@classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + key = "HKLM\\SYSTEM\\CurrentControlSet\\services\\NTDS\\Parameters" + values = [ + ("dir", "DSA Working Directory"), + ("file", "DSA Database File"), + ("file", "Database backup path"), + ("dir", "Database log files path"), + ] + for reg_key in target.registry.iterkeys(key): + for collect_type, value in values: + path = reg_key.value(value).value + spec.add((collect_type, path)) + + return spec + + +@register_module("--etl") +class ETL(Module): + DESC = "interesting ETL files" + SPEC = [ + ("glob", "sysvol/Windows/System32/WDI/LogFiles/*.etl"), + ] + + +@register_module("--recents") +class Recents(Module): + DESC = "Windows recently used files artifacts" + SPEC = [ + ("dir", "AppData/Roaming/Microsoft/Windows/Recent", from_user_home), + ("dir", "AppData/Roaming/Microsoft/Office/Recent", from_user_home), + ("glob", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/*.lnk", from_user_home), + ("glob", "Desktop/*.lnk", from_user_home), + ("glob", "Recent/*.lnk", from_user_home), + ("glob", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/*.lnk"), + ] + + +@register_module("--startup") +class Startup(Module): + DESC = "Windows Startup folder" + SPEC = [ + ("dir", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/Startup"), + ("dir", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup", from_user_home), + ] + + +def recyclebin_filter(path: fsutil.TargetPath) -> bool: + return bool(path.stat().st_size >= (10 * 1024 * 1024)) # 10MB + + +@register_module("--recyclebin") +@module_arg( + "--large-files", + action=argparse.BooleanOptionalAction, + help="Collect files larger than 10MB in the Recycle Bin", +) +@module_arg( + "--data-files", + action=argparse.BooleanOptionalAction, + help="Collect the data files in the Recycle Bin", +) +class RecycleBin(Module): + DESC = "recycle bin metadata and data files" + + 
@classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + large_files_filter = None if cli_args.large_files else recyclebin_filter + + if large_files_filter: + log.info("Skipping files in Recycle Bin that are larger than 10MB.") + + patterns = ["$Recycle.bin/*/$I*", "Recycler/*/INFO2", "Recycled/INFO2"] + + if cli_args.data_files is None or cli_args.data_files: + patterns.extend(["$Recycle.Bin/$R*", "$Recycle.Bin/*/$R*", "RECYCLE*/D*"]) + + with collector.file_filter(large_files_filter): + for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): + log.info("Acquiring recycle bin from %s as %s (%s)", fs, name, mountpoints) + + for pattern in patterns: + if main_mountpoint is not None: + pattern = fsutil.join(main_mountpoint, pattern) + collector.collect_glob(pattern) + else: + # In case the NTFS filesystem is not mounted, which should not occur but + # iter_ntfs_filesystems allows for the possibility, we fall back to raw file + # collection. 
+ for entry in fs.path().glob(pattern): + if entry.is_file(): + collector.collect_file_raw(fs, entry, name) + + +@register_module("--drivers") +class Drivers(Module): + DESC = "installed drivers" + SPEC = [ + ("glob", "sysvol/windows/system32/drivers/*.sys"), + ] + + +@register_module("--exchange") +class Exchange(Module): + DESC = "interesting Exchange configuration files" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + key = "HKLM\\SOFTWARE\\Microsoft\\ExchangeServer" + for reg_key in target.registry.iterkeys(key): + for subkey in reg_key.subkeys(): + try: + setup_key = subkey.subkey("Setup") + install_path = setup_key.value("MsiInstallPath").value + spec.update( + [ + ( + "file", + f"{install_path}\\TransportRoles\\Agents\\agents.config", + ), + ( + "dir", + f"{install_path}\\Logging\\Ews", + ), + ( + "dir", + f"{install_path}\\Logging\\CmdletInfra\\Powershell-Proxy\\Cmdlet", + ), + ( + "dir", + f"{install_path}\\TransportRoles\\Logs", + ), + ] + ) + except Exception: + pass + return spec + + +@register_module("--iis") +class IIS(Module): + DESC = "IIS logs" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set( + [ + ("glob", "sysvol\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\Windows.old\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\inetpub\\logs\\LogFiles\\*.log"), + ("glob", "sysvol\\inetpub\\logs\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\Resources\\Directory\\*\\LogFiles\\Web\\W3SVC*\\*.log"), + ] + ) + iis_plugin = iis.IISLogsPlugin(target) + spec.update([("file", log_path) for _, log_path in iis_plugin.iter_log_format_path_pairs()]) + return spec + + +@register_module("--prefetch") +class Prefetch(Module): + DESC = "Windows Prefetch files" + SPEC = [ + ("dir", "sysvol/windows/prefetch"), + ] + + +@register_module("--appcompat") +class Appcompat(Module): 
+ DESC = "Windows Amcache and RecentFileCache" + SPEC = [ + ("dir", "sysvol/windows/appcompat"), + ] + + +@register_module("--pca") +class PCA(Module): + DESC = "Windows Program Compatibility Assistant" + SPEC = [ + ("dir", "sysvol/windows/pca"), + ] + + +@register_module("--syscache") +class Syscache(Module): + DESC = "Windows Syscache hive and log files" + SPEC = [ + ("file", "sysvol/System Volume Information/Syscache.hve"), + ("glob", "sysvol/System Volume Information/Syscache.hve.LOG*"), + ] + + +@register_module("--win-notifications") +class WindowsNotifications(Module): + DESC = "Windows Push Notifications Database files." + SPEC = [ + # Old Win7/Win10 version of the file + ("file", "AppData/Local/Microsoft/Windows/Notifications/appdb.dat", from_user_home), + # New version of the file + ("file", "AppData/Local/Microsoft/Windows/Notifications/wpndatabase.db", from_user_home), + ] + + +@register_module("--bits") +class BITS(Module): + DESC = "Background Intelligent Transfer Service (BITS) queue/log DB" + SPEC = [ + # Pre-Win10 the BITS DB files are called qmgr[01].dat, in Win10 it is + # called qmgr.db and its transaction logs edb.log and edb.log[0-2] + # Win 2000/XP/2003 path + # (basically: \%ALLUSERSPROFILE%\Application Data\Microsoft\...) 
+ ("glob", "sysvol/Documents and Settings/All Users/Application Data/Microsoft/Network/Downloader/qmgr*.dat"), + # Win Vista and higher path + # (basically: \%ALLUSERSPROFILE%\Microsoft\...; %ALLUSERSPROFILE% == %PROGRAMDATA%) + ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr*.dat"), + # Win 10 files + ("file", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr.db"), + ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/edb.log*"), + ] + + +@register_module("--wbem") +class WBEM(Module): + DESC = "Windows WBEM (WMI) database files" + SPEC = [ + ("dir", "sysvol/windows/system32/wbem/Repository"), + ] + + +@register_module("--dhcp") +class DHCP(Module): + DESC = "Windows Server DHCP files" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\DhcpServer\\Parameters" + for reg_key in target.registry.iterkeys(key): + spec.add(("dir", reg_key.value("DatabasePath").value)) + return spec + + +@register_module("--dns") +class DNS(Module): + DESC = "Windows Server DNS files" + SPEC = [ + ("glob", "sysvol/windows/system32/config/netlogon.*"), + ("dir", "sysvol/windows/system32/dns"), + ] + + +@register_module("--win-dns-cache") +@local_module +class WinDnsClientCache(Module): + DESC = "The contents of Windows DNS client cache" + SPEC = [ + ( + "command", + # Powershell.exe understands a subcommand passed as single string parameter, + # no need to split the subcommand in parts. 
+ ( + ["powershell.exe", "-Command", "Get-DnsClientCache | ConvertTo-Csv -NoTypeInformation"], + "get-dnsclientcache", + ), + ), + ] + EXEC_ORDER = ExecutionOrder.BOTTOM + + +@register_module("--powershell") +class PowerShell(Module): + DESC = "Windows PowerShell Artefacts" + SPEC = [ + ("dir", "AppData/Roaming/Microsoft/Windows/PowerShell", from_user_home), + ] + + +@register_module("--thumbnail-cache") +class ThumbnailCache(Module): + DESC = "Windows thumbnail db artifacts" + SPEC = [ + ("glob", "AppData/Local/Microsoft/Windows/Explorer/thumbcache_*", from_user_home), + ] + + +@register_module("--text-editor") +class TextEditor(Module): + DESC = "text editor (un)saved tab contents" + # Only Windows 11 notepad & Notepad++ tabs for now, but locations for other text editors may be added later. + SPEC = [ + ("dir", "AppData/Local/Packages/Microsoft.WindowsNotepad_8wekyb3d8bbwe/LocalState/TabState/", from_user_home), + ("dir", "AppData/Roaming/Notepad++/backup/", from_user_home), + ] + + +@register_module("--misc") +class Misc(Module): + DESC = "miscellaneous Windows artefacts" + SPEC = [ + ("file", "sysvol/windows/PFRO.log"), + ("file", "sysvol/windows/setupapi.log"), + ("file", "sysvol/windows/setupapidev.log"), + ("glob", "sysvol/windows/inf/setupapi*.log"), + ("glob", "sysvol/system32/logfiles/*/*.txt"), + ("dir", "sysvol/windows/system32/sru"), + ("dir", "sysvol/windows/system32/drivers/etc"), + ("dir", "sysvol/Windows/System32/WDI/LogFiles/StartupInfo"), + ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), + ("dir", "sysvol/ProgramData/Microsoft/Group Policy/History/"), + ("dir", "AppData/Local/Microsoft/Group Policy/History/", from_user_home), + ("glob", "sysvol/Windows/System32/LogFiles/SUM/*.mdb"), + ("glob", "sysvol/ProgramData/USOShared/Logs/System/*.etl"), + ("glob", "sysvol/Windows/Logs/WindowsUpdate/WindowsUpdate*.etl"), + ("glob", "sysvol/Windows/Logs/CBS/CBS*.log"), + ("dir", "sysvol/ProgramData/Microsoft/Search/Data/Applications/Windows"), + 
("dir", "sysvol/Windows/SoftwareDistribution/DataStore"), + ] + + +@register_module("--av") +class AV(Module): + DESC = "various antivirus logs" + SPEC = [ + # AVG + ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/log"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/report"), + ("dir", "sysvol/ProgramData/AVG/Antivirus/log"), + ("dir", "sysvol/ProgramData/AVG/Antivirus/report"), + # Avast + ("dir", "sysvol/Documents And Settings/All Users/Application Data/Avast Software/Avast/Log"), + ("dir", "sysvol/ProgramData/Avast Software/Avast/Log"), + ("dir", "Avast Software/Avast/Log", from_user_home), + ("file", "sysvol/ProgramData/Avast Software/Avast/Chest/index.xml"), + # Avira + ("dir", "sysvol/ProgramData/Avira/Antivirus/LOGFILES"), + ("dir", "sysvol/ProgramData/Avira/Security/Logs"), + ("dir", "sysvol/ProgramData/Avira/VPN"), + # Bitdefender + ("dir", "sysvol/ProgramData/Bitdefender/Endpoint Security/Logs"), + ("dir", "sysvol/ProgramData/Bitdefender/Desktop/Profiles/Logs"), + ("glob", "sysvol/Program Files*/Bitdefender*/*"), + # ComboFix + ("file", "sysvol/ComboFix.txt"), + # Cybereason + ("dir", "sysvol/ProgramData/crs1/Logs"), + ("dir", "sysvol/ProgramData/apv2/Logs"), + ("dir", "sysvol/ProgramData/crb1/Logs"), + # Cylance + ("dir", "sysvol/ProgramData/Cylance/Desktop"), + ("dir", "sysvol/ProgramData/Cylance/Optics/Log"), + ("dir", "sysvol/Program Files/Cylance/Desktop/log"), + # ESET + ("dir", "sysvol/Documents and Settings/All Users/Application Data/ESET/ESET NOD32 Antivirus/Logs"), + ("dir", "sysvol/ProgramData/ESET/ESET NOD32 Antivirus/Logs"), + ("dir", "sysvol/ProgramData/ESET/ESET Security/Logs"), + ("dir", "sysvol/ProgramData/ESET/RemoteAdministrator/Agent/EraAgentApplicationData/Logs"), + ("dir", "sysvol/Windows/System32/config/systemprofile/AppData/Local/ESET/ESET Security/Quarantine"), + # Emsisoft + ("glob", "sysvol/ProgramData/Emsisoft/Reports/scan*.txt"), + # F-Secure + ("dir", 
"sysvol/ProgramData/F-Secure/Log"), + ("dir", "AppData/Local/F-Secure/Log", from_user_home), + ("dir", "sysvol/ProgramData/F-Secure/Antivirus/ScheduledScanReports"), + # HitmanPro + ("dir", "sysvol/ProgramData/HitmanPro/Logs"), + ("dir", "sysvol/ProgramData/HitmanPro.Alert/Logs"), + ("file", "sysvol/ProgramData/HitmanPro.Alert/excalibur.db"), + ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), + # Malwarebytes + ("glob", "sysvol/ProgramData/Malwarebytes/Malwarebytes Anti-Malware/Logs/mbam-log-*.xml"), + ("glob", "sysvol/ProgramData/Malwarebytes/MBAMService/logs/mbamservice.log*"), + ("dir", "AppData/Roaming/Malwarebytes/Malwarebytes Anti-Malware/Logs", from_user_home), + ("dir", "sysvol/ProgramData/Malwarebytes/MBAMService/ScanResults"), + # McAfee + ("dir", "Application Data/McAfee/DesktopProtection", from_user_home), + ("dir", "sysvol/ProgramData/McAfee/DesktopProtection"), + ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs"), + ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs_Old"), + ("dir", "sysvol/ProgramData/Mcafee/VirusScan"), + ("dir", "sysvol/ProgramData/McAfee/MSC/Logs"), + ("dir", "sysvol/ProgramData/McAfee/Agent/AgentEvents"), + ("dir", "sysvol/ProgramData/McAfee/Agent/logs"), + ("dir", "sysvol/ProgramData/McAfee/datreputation/Logs"), + ("dir", "sysvol/ProgramData/Mcafee/Managed/VirusScan/Logs"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Common Framework/AgentEvents"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/MCLOGS/SAE"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/datreputation/Logs"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Managed/VirusScan/Logs"), + ("dir", "sysvol/Program Files (x86)/McAfee/DLP/WCF Service/Log"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Apache2/Logs"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/DB/Events"), + ("dir", "sysvol/Program 
Files (x86)/McAfee/ePolicy Orchestrator/DB/Events/Debug"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Server/Logs"), + # RogueKiller + ("glob", "sysvol/ProgramData/RogueKiller/logs/AdliceReport_*.json"), + # SUPERAntiSpyware + ("dir", "AppData/Roaming/SUPERAntiSpyware/Logs", from_user_home), + # SecureAge + ("dir", "sysvol/ProgramData/SecureAge Technology/SecureAge/log"), + # SentinelOne + ("dir", "sysvol/programdata/sentinel/logs"), + # Sophos + ("glob", "sysvol/Documents and Settings/All Users/Application Data/Sophos/Sophos */Logs"), + ("glob", "sysvol/ProgramData/Sophos/Sophos */Logs"), + # Symantec + ( + "dir", + "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Logs/AV", + ), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Logs"), + ("dir", "AppData/Local/Symantec/Symantec Endpoint Protection/Logs", from_user_home), + ("dir", "sysvol/Windows/System32/winevt/logs/Symantec Endpoint Protection Client.evtx"), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/CmnClnt/ccSubSDK"), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/registrationInfo.xml"), + # TotalAV + ("glob", "sysvol/Program Files*/TotalAV/logs"), + ("dir", "sysvol/ProgramData/TotalAV/logs"), + # Trendmicro + ("glob", "sysvol/Program Files*/Trend Micro"), + # VIPRE + ("dir", "sysvol/ProgramData/VIPRE Business Agent/Logs"), + ("dir", "AppData/Roaming/VIPRE Business", from_user_home), + ("dir", "AppData/Roaming/GFI Software/AntiMalware/Logs", from_user_home), + ("dir", "AppData/Roaming/Sunbelt Software/AntiMalware/Logs", from_user_home), + # Webroot + ("file", "sysvol/ProgramData/WRData/WRLog.log"), + # Microsoft Windows Defender + ("dir", "sysvol/ProgramData/Microsoft/Microsoft AntiMalware/Support"), + ("glob", "sysvol/Windows/System32/winevt/Logs/Microsoft-Windows-Windows Defender*.evtx"), + ("dir", "sysvol/ProgramData/Microsoft/Windows 
Defender/Support"), + ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Scans/History/Service/DetectionHistory"), + ("file", "sysvol/Windows/Temp/MpCmdRun.log"), + ("file", "sysvol/Windows.old/Windows/Temp/MpCmdRun.log"), + ] + + +@register_module("--quarantined") +class QuarantinedFiles(Module): + DESC = "files quarantined by various antivirus products" + SPEC = [ + # Microsoft Defender + # https://knez.github.io/posts/how-to-extract-quarantine-files-from-windows-defender/ + ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Quarantine"), + # Symantec Endpoint Protection + ( + "dir", + "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Quarantine", + ), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Quarantine"), + # Trend Micro + # https://secret.inf.ufpr.br/papers/marcus_av_handson.pdf + ("dir", "sysvol/ProgramData/Trend Micro/AMSP/quarantine"), + # McAfee + ("dir", "sysvol/Quarantine"), + ("dir", "sysvol/ProgramData/McAfee/VirusScan/Quarantine"), + # Sophos + ("glob", "sysvol/ProgramData/Sophos/Sophos/*/Quarantine"), + ("glob", "sysvol/ProgramData/Sophos/Sophos */INFECTED"), + ("dir", "sysvol/ProgramData/Sophos/Safestore"), + # HitmanPRO + ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), + ] + + +@register_module("--edr") +class EDR(Module): + DESC = "various Endpoint Detection and Response (EDR) logs" + SPEC = [ + # Carbon Black + ("dir", "sysvol/ProgramData/CarbonBlack/Logs"), + ] + + +@register_module("--history") +class History(Module): + DESC = "browser history from IE, Edge, Firefox, and Chrome" + DIR_COMBINATIONS = namedtuple("DirCombinations", ["root_dirs", "dir_extensions", "history_files"]) + COMMON_DIR_COMBINATIONS = [ + DIR_COMBINATIONS( + [ + # Chromium - RHEL/Ubuntu - DNF/apt + ".config/chromium", + # Chrome - RHEL/Ubuntu - DNF + ".config/google-chrome", + # Edge - RHEL/Ubuntu - DNF/apt + ".config/microsoft-edge", + # Chrome - RHEL/Ubuntu - Flatpak + 
".var/app/com.google.Chrome/config/google-chrome", + # Edge - RHEL/Ubuntu - Flatpak + ".var/app/com.microsoft.Edge/config/microsoft-edge", + # Chromium - RHEL/Ubuntu - Flatpak + ".var/app/org.chromium.Chromium/config/chromium", + # Chrome + "AppData/Local/Google/Chrom*/User Data", + # Edge + "AppData/Local/Microsoft/Edge/User Data", + "Library/Application Support/Microsoft Edge", + "Local Settings/Application Data/Microsoft/Edge/User Data", + # Chrome - Legacy + "Library/Application Support/Chromium", + "Library/Application Support/Google/Chrome", + "Local Settings/Application Data/Google/Chrom*/User Data", + # Chromium - RHEL/Ubuntu - snap + "snap/chromium/common/chromium", + # Brave - Windows + "AppData/Local/BraveSoftware/Brave-Browser/User Data", + "AppData/Roaming/BraveSoftware/Brave-Browser/User Data", + # Brave - Linux + ".config/BraveSoftware", + # Brave - MacOS + "Library/Application Support/BraveSoftware", + ], + ["*", "Snapshots/*/*"], + [ + "Archived History", + "Bookmarks", + "Cookies*", + "Network", + "Current Session", + "Current Tabs", + "Extension Cookies", + "Favicons", + "History", + "Last Session", + "Last Tabs", + "Login Data", + "Login Data For Account", + "Media History", + "Shortcuts", + "Snapshots", + "Top Sites", + "Web Data", + ], + ), + ] + + SPEC = [ + # IE + ("dir", "AppData/Local/Microsoft/Internet Explorer/Recovery", from_user_home), + ("dir", "AppData/Local/Microsoft/Windows/INetCookies", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/WebCache/*.dat", from_user_home), + # IE - index.dat + ("file", "Cookies/index.dat", from_user_home), + ("file", "Local Settings/History/History.IE5/index.dat", from_user_home), + ("glob", "Local Settings/History/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "Local Settings/Temporary Internet Files/Content.IE5/index.dat", from_user_home), + ("file", "Local Settings/Application Data/Microsoft/Feeds Cache/index.dat", from_user_home), + ("file", 
"AppData/Local/Microsoft/Windows/History/History.IE5/index.dat", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/History/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/index.dat", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Content.IE5/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Low/Content.IE5/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/Cookies/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/Cookies/Low/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/IEDownloadHistory/index.dat", from_user_home), + # Firefox - Windows + ("glob", "AppData/Local/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + ("glob", "AppData/Roaming/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + ("glob", "Application Data/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + # Firefox - macOS + ("glob", "/Users/*/Library/Application Support/Firefox/Profiles/*/*.sqlite*"), + # Firefox - RHEL/Ubuntu - Flatpak + ("glob", ".var/app/org.mozilla.firefox/.mozilla/firefox/*/*.sqlite*", from_user_home), + # Firefox - RHEL/Ubuntu - DNF/apt + ("glob", ".mozilla/firefox/*/*.sqlite*", from_user_home), + # Firefox - RHEL/Ubuntu - snap + ("glob", "snap/firefox/common/.mozilla/firefox/*/*.sqlite*", from_user_home), + # Safari - macOS + ("file", "Library/Safari/Bookmarks.plist", from_user_home), + ("file", "Library/Safari/Downloads.plist", from_user_home), + ("file", "Library/Safari/Extensions/Extensions.plist", from_user_home), + ("glob", "Library/Safari/History.*", from_user_home), + ("file", "Library/Safari/LastSession.plist", from_user_home), + ("file", "Library/Caches/com.apple.Safari/Cache.db", from_user_home), + ] + + 
@classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + for root_dirs, extension_dirs, history_files in cls.COMMON_DIR_COMBINATIONS: + for root_dir, extension_dir, history_file in product(root_dirs, extension_dirs, history_files): + full_path = f"{root_dir}/{extension_dir}/{history_file}" + search_type = "glob" if "*" in full_path else "file" + + spec.add((search_type, full_path, from_user_home)) + + return spec + + +@register_module("--remoteaccess") +class RemoteAccess(Module): + DESC = "common remote access tools' log files" + SPEC = [ + # teamviewer + ("glob", "sysvol/Program Files/TeamViewer/*.log"), + ("glob", "sysvol/Program Files (x86)/TeamViewer/*.log"), + ("glob", "/var/log/teamviewer*/*.log"), + ("glob", "AppData/Roaming/TeamViewer/*.log", from_user_home), + ("glob", "Library/Logs/TeamViewer/*.log", from_user_home), + # anydesk - Windows + ("dir", "sysvol/ProgramData/AnyDesk"), + ("glob", "AppData/Roaming/AnyDesk/*.trace", from_user_home), + ("glob", "AppData/Roaming/AnyDesk/*/*.trace", from_user_home), + # anydesk - Mac + Linux + ("glob", ".anydesk*/*.trace", from_user_home), + ("file", "/var/log/anydesk.trace"), + # zoho + ("dir", "sysvol/ProgramData/ZohoMeeting/log"), + ("dir", "AppData/Local/ZohoMeeting/log", from_user_home), + # realvnc + ("file", "sysvol/ProgramData/RealVNC-Service/vncserver.log"), + ("file", "AppData/Local/RealVNC/vncserver.log", from_user_home), + # tightvnc + ("dir", "sysvol/ProgramData/TightVNC/Server/Logs"), + # Remote desktop cache files + ("dir", "AppData/Local/Microsoft/Terminal Server Client/Cache", from_user_home), + ] + + +@register_module("--webhosting") +class WebHosting(Module): + DESC = "Web hosting software log files" + SPEC = [ + # cPanel + ("dir", "/usr/local/cpanel/logs"), + ("file", ".lastlogin", from_user_home), + ] + + +@register_module("--wer") +class WER(Module): + DESC = "WER (Windows Error Reporting) related files" + + @classmethod + def 
get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + for wer_dir in itertools.chain( + ["sysvol/ProgramData/Microsoft/Windows/WER"], + from_user_home(target, "AppData/Local/Microsoft/Windows/WER"), + ): + for path in target.fs.path(wer_dir).rglob("*"): + if not path.is_file(): + continue + + if path.stat().st_size >= (1024 * 1024 * 1024): # 1GB + log.debug("Skipping WER file because it exceeds 1GB: %s", path) + continue + + spec.add(("file", path)) + + return spec + + +@register_module("--etc") +class Etc(Module): + SPEC = [ + # In OS-X /etc is a symlink to /private/etc. To prevent collecting + # duplicates, we only use the /etc directory here. + ("dir", "/etc"), + ("dir", "/usr/local/etc"), + ] + + +@register_module("--boot") +class Boot(Module): + SPEC = [ + ("glob", "/boot/config*"), + ("glob", "/boot/efi*"), + ("glob", "/boot/grub*"), + ("glob", "/boot/init*"), + ("glob", "/boot/system*"), + ] + + +def private_key_filter(path: fsutil.TargetPath) -> bool: + if path.is_file() and not path.is_symlink(): + with path.open("rt") as file: + return "PRIVATE KEY" in file.readline() + + +@register_module("--home") +class Home(Module): + SPEC = [ + # Catches most shell related configuration files + ("glob", ".*[akz]sh*", from_user_home), + ("glob", "*/.*[akz]sh*", from_user_home), + # Added to catch any shell related configuration file not caught with the above glob + ("glob", ".*history", from_user_home), + ("glob", "*/.*history", from_user_home), + ("glob", ".*rc", from_user_home), + ("glob", "*/.*rc", from_user_home), + ("glob", ".*_logout", from_user_home), + ("glob", "*/.*_logout", from_user_home), + # Miscellaneous configuration files + ("dir", ".config", from_user_home), + ("glob", "*/.config", from_user_home), + ("file", ".wget-hsts", from_user_home), + ("glob", "*/.wget-hsts", from_user_home), + ("file", ".gitconfig", from_user_home), + ("glob", "*/.gitconfig", from_user_home), + ("file", ".selected_editor", 
from_user_home), + ("glob", "*/.selected_editor", from_user_home), + ("file", ".viminfo", from_user_home), + ("glob", "*/.viminfo", from_user_home), + ("file", ".lesshist", from_user_home), + ("glob", "*/.lesshist", from_user_home), + ("file", ".profile", from_user_home), + ("glob", "*/.profile", from_user_home), + # OS-X home (aka /Users) + ("glob", ".bash_sessions/*", from_user_home), + ("glob", "Library/LaunchAgents/*", from_user_home), + ("glob", "Library/Logs/*", from_user_home), + ("glob", "Preferences/*", from_user_home), + ("glob", "Library/Preferences/*", from_user_home), + ] + + +@register_module("--ssh") +@module_arg("--private-keys", action=argparse.BooleanOptionalAction, help="Add any private keys") +class SSH(Module): + SPEC = [ + ("glob", ".ssh/*", from_user_home), + ("glob", "/etc/ssh/*"), + ("glob", "sysvol/ProgramData/ssh/*"), + ] + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + # Acquire SSH configuration in sshd directories + + filter = None if cli_args.private_keys else private_key_filter + + if filter: + log.info("Executing SSH without --private-keys, skipping private keys.") + + with collector.file_filter(filter): + super().run(target, cli_args, collector) + + +@register_module("--var") +class Var(Module): + SPEC = [ + # In OS-X /var is a symlink to /private/var. To prevent collecting + # duplicates, we only use the /var directory here. 
+ ("dir", "/var/log"), + ("dir", "/var/spool/at"), + ("dir", "/var/spool/cron"), + ("dir", "/var/spool/anacron"), + ("dir", "/var/lib/dpkg/status"), + ("dir", "/var/lib/rpm"), + ("dir", "/var/db"), + ("dir", "/var/audit"), + ("dir", "/var/cron"), + ("dir", "/var/run"), + # some OS-X specific files + ("dir", "/private/var/at"), + ("dir", "/private/var/db/diagnostics"), + ("dir", "/private/var/db/uuidtext"), + ("file", "/private/var/vm/sleepimage"), + ("glob", "/private/var/vm/swapfile*"), + ("glob", "/private/var/folders/*/*/0/com.apple.notificationcenter/*/*"), + # user specific cron on OS-X + ("dir", "/usr/lib/cron"), + ] + + +@register_module("--bsd") +class BSD(Module): + SPEC = [ + ("file", "/bin/freebsd-version"), + ("dir", "/usr/ports"), + ] + + +@register_module("--osx") +class OSX(Module): + DESC = "OS-X specific files and directories" + SPEC = [ + # filesystem events + ("dir", "/.fseventsd"), + # kernel extensions + ("dir", "/Library/Extensions"), + ("dir", "/System/Library/Extensions"), + # logs + ("dir", "/Library/Logs"), + # autorun locations + ("dir", "/Library/LaunchAgents"), + ("dir", "/Library/LaunchDaemons"), + ("dir", "/Library/StartupItems"), + ("dir", "/System/Library/LaunchAgents"), + ("dir", "/System/Library/LaunchDaemons"), + ("dir", "/System/Library/StartupItems"), + # installed software + ("dir", "/Library/Receipts/InstallHistory.plist"), + ("file", "/System/Library/CoreServices/SystemVersion.plist"), + # system preferences + ("dir", "/Library/Preferences"), + # DHCP settings + ("dir", "/private/var/db/dhcpclient/leases"), + ] + + +@register_module("--osx-applications-info") +class OSXApplicationsInfo(Module): + DESC = "OS-X info.plist from all installed applications" + SPEC = [ + ("glob", "/Applications/*/Contents/Info.plist"), + ("glob", "Applications/*/Contents/Info.plist", from_user_home), + ] + + +@register_module("--bootbanks") +class Bootbanks(Module): + DESC = "ESXi bootbanks" + + @classmethod + def _run(cls, target: Target, 
cli_args: argparse.Namespace, collector: Collector) -> None: + # Both ESXi 6 and 7 compatible + boot_dirs = { + "boot": "BOOT", + "bootbank": "BOOTBANK1", + "altbootbank": "BOOTBANK2", + } + boot_fs = {} + + for boot_dir, boot_vol in boot_dirs.items(): + dir_path = target.fs.path(boot_dir) + if dir_path.is_symlink() and dir_path.exists(): + dst = dir_path.readlink() + fs = dst.get().top.fs + boot_fs[fs] = boot_vol + + for fs, mountpoint, uuid, _ in iter_esxi_filesystems(target): + if fs in boot_fs: + name = boot_fs[fs] + log.info("Acquiring %s (%s)", mountpoint, name) + mountpoint_len = len(mountpoint) + base = f"fs/{uuid}:{name}" + for path in target.fs.path(mountpoint).rglob("*"): + outpath = path.as_posix()[mountpoint_len:] + collector.collect_path(path, outpath=outpath, base=base) + + +@register_module("--esxi") +class ESXi(Module): + DESC = "ESXi interesting files" + SPEC = [ + ("dir", "/scratch/log"), + ("dir", "/locker/packages/var"), + # ESXi 7 + ("dir", "/scratch/cache"), + ("dir", "/scratch/vmkdump"), + # ESXi 6 + ("dir", "/scratch/vmware"), + ] + + +@register_module("--vmfs") +class VMFS(Module): + DESC = "ESXi VMFS metadata files" + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + for fs, mountpoint, uuid, name in iter_esxi_filesystems(target): + if not fs.__type__ == "vmfs": + continue + + log.info("Acquiring %s (%s)", mountpoint, name) + mountpoint_len = len(mountpoint) + base = f"fs/{uuid}:{name}" + for path in target.fs.path(mountpoint).glob("*.sf"): + outpath = path.as_posix()[mountpoint_len:] + collector.collect_path(path, outpath=outpath, base=base) + + +@register_module("--activities-cache") +class ActivitiesCache(Module): + DESC = "user's activities caches" + SPEC = [ + ("dir", "AppData/Local/ConnectedDevicesPlatform", from_user_home), + ] + + +@register_module("--hashes") +@module_arg( + "--hash-func", + action="append", + type=HashFunc, + choices=[h.value for h in HashFunc], + 
help="Hash function to use", +) +@module_arg("--dir-to-hash", action="append", help="Hash only files in a provided directory") +@module_arg("--ext-to-hash", action="append", help="Hash only files with the extensions provided") +@module_arg("--glob-to-hash", action="append", help="Hash only files that match provided glob") +class FileHashes(Module): + DESC = "file hashes" + + DEFAULT_HASH_FUNCS = (HashFunc.MD5, HashFunc.SHA1, HashFunc.SHA256) + DEFAULT_EXTENSIONS = ( + "bat", + "cmd", + "com", + "dll", + "exe", + "installlog", + "installutil", + "js", + "lnk", + "ps1", + "sys", + "tlb", + "vbs", + ) + DEFAULT_PATHS = ("sysvol/Windows/",) + + MAX_FILE_SIZE_BYTES = 100 * 1024 * 1024 # 100MB + + DEFAULT_FILE_FILTERS = ( + functools.partial(filter_out_by_path_match, re_pattern="^/(sysvol/)?Windows/WinSxS/"), + functools.partial(filter_out_huge_files, max_size_bytes=MAX_FILE_SIZE_BYTES), + functools.partial(filter_out_by_value_match, value=b"MZ", offsets=[0, 3]), + ) + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + log.info("*** Acquiring file hashes") + + specs = cls.get_specs(cli_args) + + with collector.bind_module(cls): + start = time.time() + + path_hashes = collect_hashes(target, specs, path_filters=cls.DEFAULT_FILE_FILTERS) + rows_count, csv_compressed_bytes = serialize_into_csv(path_hashes, compress=True) + + collector.write_bytes( + f"{collector.base}/{collector.METADATA_BASE}/file-hashes.csv.gz", + csv_compressed_bytes, + ) + log.info("Hashing is done, %s files processed in %.2f secs", rows_count, (time.time() - start)) + + @classmethod + def get_specs(cls, cli_args: argparse.Namespace) -> Iterator[tuple]: + path_selectors = [] + + if cli_args.ext_to_hash: + extensions = cli_args.ext_to_hash + else: + extensions = cls.DEFAULT_EXTENSIONS + + if cli_args.dir_to_hash or cli_args.glob_to_hash: + if cli_args.glob_to_hash: + path_selectors.extend([("glob", glob) for glob in cli_args.glob_to_hash]) + + if 
cli_args.dir_to_hash: + path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cli_args.dir_to_hash]) + + else: + path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cls.DEFAULT_PATHS]) + + if cli_args.hash_func: + hash_funcs = cli_args.hash_func + else: + hash_funcs = cls.DEFAULT_HASH_FUNCS + + return [(path_selector, hash_funcs) for path_selector in path_selectors] + + +@register_module("--handles") +@module_arg( + "--handle-types", + action="extend", + help="Collect only specified handle types", + type=NamedObjectType, + choices=[h.value for h in NamedObjectType], + nargs="*", +) +@local_module +class OpenHandles(Module): + DESC = "Open handles" + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + if not sys.platform == "win32": + log.error("Open Handles plugin can only run on Windows systems! Skipping...") + return + + from acquire.dynamic.windows.collect import collect_open_handles + from acquire.dynamic.windows.handles import serialize_handles_into_csv + + log.info("*** Acquiring open handles") + + handle_types = cli_args.handle_types + + with collector.bind_module(cls): + handles = collect_open_handles(handle_types) + csv_compressed_handles = serialize_handles_into_csv(handles) + + collector.write_bytes( + f"{collector.base}/{collector.METADATA_BASE}/open_handles.csv.gz", + csv_compressed_handles, + ) + log.info("Collecting open handles is done.") + + +def print_disks_overview(target: Target) -> None: + log.info("// Disks") + try: + for disk in target.disks: + log.info("%s", disk) + if not disk.vs: + continue + + for volume in disk.vs.volumes: + log.info("- %s", volume) + except Exception: + log.error("Failed to iterate disks") + log.info("") + + +def print_volumes_overview(target: Target) -> None: + log.info("// Volumes") + try: + for volume in target.volumes: + log.info("%s", volume) + except Exception: + log.error("Failed to iterate volumes") + log.info("") + + 
+def print_acquire_warning(target: Target) -> None: + if target.os != "windows": + log.warning("========================================== WARNING ==========================================") + log.warning("") + log.warning( + "The support for operating system '%s' is experimental. Some artifacts may not yet be included and some ", + target.os, + ) + log.warning("features may not work as expected. Please notify upstream for any missing artifacts or features.") + log.warning("") + log.warning("========================================== WARNING ==========================================") + + +def _add_modules_for_profile(choice: str, operating_system: str, profile: dict, msg: str) -> Optional[dict]: + modules_selected = dict() + + if choice and choice != "none": + profile_dict = profile[choice] + if operating_system not in profile_dict: + log.error(msg, operating_system, choice) + return None + + for mod in profile_dict[operating_system]: + modules_selected[mod.__modname__] = mod + + return modules_selected + + +def acquire_target(target: Target, args: argparse.Namespace, output_ts: Optional[str] = None) -> list[str | Path]: + acquire_gui = GUI() + files = [] + output_ts = output_ts or get_utc_now_str() + if args.log_to_dir: + log_file = args.log_path.joinpath(format_output_name("Unknown", output_ts, "log")) + # This will also rename the log file on disk, which was opened in main(), if the name is different + reconfigure_log_file(log, log_file, delay=True) + else: + log_file = args.log_path + + skip_list = set() + if log_file: + files.append(log_file) + if target.path.name == "local": + skip_list.add(normalize_path(target, log_file, resolve_parents=True, preserve_case=False)) + + print_disks_overview(target) + print_volumes_overview(target) + + if not target._os_plugin: + log.error("Error: Unable to detect OS") + return files + + hostname = "" + try: + hostname = target.hostname + except Exception: + log.exception("Failed to get hostname") + + version = None + try: 
+ version = target.version + except Exception: + log.exception("Failed to detect OS version") + + if version is None: + os_plugin_name = target._os_plugin.__name__.lower() + version = f"{target.os} ({os_plugin_name})" + + log.info("Target name: %s", target.name) + log.info("Hostname: %s", hostname) + log.info("OS: %s", version) + log.info("") + + print_acquire_warning(target) + + modules_selected = {} + modules_successful = [] + modules_failed = {} + for name, mod in MODULES.items(): + name_slug = name.lower() + # check if module was set in the arguments provided + if getattr(args, name_slug): + modules_selected[name] = mod + + profile = args.profile + + # Set profile to default if no profile, modules, files, directories or globes were selected + if not profile and not modules_selected and not args.file and not args.directory and not args.glob: + log.info("Using default collection profile") + profile = "default" + log.info("") + + profile_modules = _add_modules_for_profile( + profile, target.os, PROFILES, "No collection set for OS %s with profile %s" + ) + + if not (volatile_profile := args.volatile_profile): + volatile_profile = "none" + + volatile_modules = _add_modules_for_profile( + volatile_profile, target.os, VOLATILE, "No collection set for OS %s with volatile profile %s" + ) + + if (profile_modules or volatile_modules) is None: + return files + + modules_selected.update(profile_modules) + modules_selected.update(volatile_modules) + + log.info("Modules selected: %s", ", ".join(sorted(modules_selected))) + + local_only_modules = {name: module for name, module in modules_selected.items() if hasattr(module, "__local__")} + if target.path.name != "local" and local_only_modules: + for name, module in local_only_modules.items(): + modules_failed[module.__name__] = "Not running on a local target" + log.error( + "Can not use local-only modules with non-local targets. 
Skipping: %s", + " ".join(sorted(local_only_modules.keys())), + ) + log.info("") + # Remove local-only modules from the modules list + modules_selected = dict(modules_selected.items() - local_only_modules.items()) + + log_file_handler = get_file_handler(log) + # Prepare log file and output file names + if log_file_handler and args.log_to_dir: + log_file = format_output_name(target.name, output_ts, "log") + # This will also rename the log file on disk, which was opened and written previously. + log_file_handler.set_filename(log_file) + log_path = Path(log_file_handler.baseFilename).resolve() + log.info("Logging to file %s", log_path) + files = [log_file_handler.baseFilename] + if target.path.name == "local": + skip_list = {normalize_path(target, log_path, resolve_parents=True, preserve_case=False)} + + output_path = args.output or args.output_file + if output_path.is_dir(): + output_dir = format_output_name(target.name, output_ts) + output_path = output_path.joinpath(output_dir) + output_path = output_path.resolve() + + output = OUTPUTS[args.output_type]( + output_path, + compress=args.compress, + compression_method=args.compress_method, + encrypt=args.encrypt, + public_key=args.public_key, + ) + files.append(output.path) + if target.path.name == "local": + skip_list.add(normalize_path(target, output.path, resolve_parents=True, preserve_case=False)) + + log.info("Writing output to %s", output.path) + if skip_list: + log.info("Skipping own files: %s", ", ".join(skip_list)) + log.info("") + + dir_base = "fs" + if target.os != "windows": + dir_base = "fs/$rootfs$" + + with Collector(target, output, base=dir_base, skip_list=skip_list) as collector: + # Acquire specified files + if args.file or args.directory or args.glob: + log.info("*** Acquiring specified paths") + spec = [] + + if args.file: + for path in args.file: + spec.append(("file", path.strip())) + + if args.directory: + for path in args.directory: + spec.append(("dir", path.strip())) + + if args.glob: + for 
path in args.glob: + spec.append(("glob", path.strip())) + + collector.collect(spec, module_name=CLI_ARGS_MODULE) + modules_successful.append(CLI_ARGS_MODULE) + log.info("") + + # Run modules (sort first based on execution order) + modules_selected = sorted(modules_selected.items(), key=lambda module: module[1].EXEC_ORDER) + count = 0 + for name, mod in modules_selected: + try: + mod.run(target, args, collector) + + modules_successful.append(mod.__name__) + except Exception: + log.error("Error while running module %s", name, exc_info=True) + modules_failed[mod.__name__] = get_formatted_exception() + + acquire_gui.progress = (acquire_gui.shard // len(modules_selected)) * count + count += 1 + + log.info("") + + collection_report = collector.report + + log.info("Done collecting artifacts:") + + # prepare and render full report only if logging level is more permissive than INFO + if log.level < logging.INFO: + log.debug(get_full_formatted_report(collection_report)) + + log.info(get_report_summary(collection_report)) + + if not args.disable_report: + collection_report_serialized = collection_report.get_records_per_module_per_outcome(serialize_records=True) + + execution_report = { + "target": str(target), + "name": target.name, + "timestamp": get_utc_now().isoformat(), + "modules-successful": modules_successful, + "modules-failed": modules_failed, + **collection_report_serialized, + } + + if args.output: + report_file_name = format_output_name(target.name, postfix=output_ts, ext="report.json") + else: + report_file_name = f"{output_path.name}.report.json" + + report_file_path = output_path.parent / report_file_name + persist_execution_report(report_file_path, execution_report) + + files.append(report_file_path) + log.info("Acquisition report for %s is written to %s", target, report_file_path) + + log.info("Output: %s", output.path) + return files + + +def upload_files(paths: list[str | Path], upload_plugin: UploaderPlugin, no_proxy: bool = False) -> None: + proxies = 
None if no_proxy else urllib.request.getproxies() + log.debug("Proxies: %s (no_proxy = %s)", proxies, no_proxy) + + log.info('Uploading files: "%s"', " ".join(map(str, paths))) + try: + upload_files_using_uploader(upload_plugin, paths, proxies) + except Exception: + log.error('Upload FAILED for files: "%s". See log file for details.', " ".join(map(str, paths))) + raise + else: + log.info("Upload succeeded.") + + +class WindowsProfile: + MINIMAL = [ + NTFS, + EventLogs, + Registry, + Tasks, + PowerShell, + Prefetch, + Appcompat, + PCA, + Misc, + Startup, + ] + DEFAULT = [ + *MINIMAL, + ETL, + Recents, + RecycleBin, + Drivers, + Syscache, + WBEM, + AV, + BITS, + DHCP, + DNS, + ActiveDirectory, + RemoteAccess, + ActivitiesCache, + ] + FULL = [ + *DEFAULT, + History, + NTDS, + QuarantinedFiles, + WindowsNotifications, + SSH, + IIS, + TextEditor, + ] + + +class LinuxProfile: + MINIMAL = [ + Etc, + Boot, + Home, + SSH, + Var, + ] + DEFAULT = MINIMAL + FULL = [ + *DEFAULT, + History, + WebHosting, + ] + + +class BsdProfile: + MINIMAL = [ + Etc, + Boot, + Home, + SSH, + Var, + BSD, + ] + DEFAULT = MINIMAL + FULL = MINIMAL + + +class ESXiProfile: + MINIMAL = [ + Bootbanks, + ESXi, + SSH, + ] + DEFAULT = [ + *MINIMAL, + VMFS, + ] + FULL = DEFAULT + + +class OSXProfile: + MINIMAL = [ + Etc, + Home, + Var, + OSX, + OSXApplicationsInfo, + ] + DEFAULT = MINIMAL + FULL = [ + *DEFAULT, + History, + SSH, + ] + + +PROFILES = { + "full": { + "windows": WindowsProfile.FULL, + "linux": LinuxProfile.FULL, + "bsd": BsdProfile.FULL, + "esxi": ESXiProfile.FULL, + "osx": OSXProfile.FULL, + }, + "default": { + "windows": WindowsProfile.DEFAULT, + "linux": LinuxProfile.DEFAULT, + "bsd": BsdProfile.DEFAULT, + "esxi": ESXiProfile.DEFAULT, + "osx": OSXProfile.DEFAULT, + }, + "minimal": { + "windows": WindowsProfile.MINIMAL, + "linux": LinuxProfile.MINIMAL, + "bsd": BsdProfile.MINIMAL, + "esxi": ESXiProfile.MINIMAL, + "osx": OSXProfile.MINIMAL, + }, + "none": None, +} + + +class VolatileProfile: 
+ DEFAULT = [ + Netstat, + WinProcesses, + WinProcEnv, + WinArpCache, + WinRDPSessions, + WinDnsClientCache, + ] + EXTENSIVE = [ + Proc, + Sys, + ] + + +VOLATILE = { + "default": { + "windows": VolatileProfile.DEFAULT, + "linux": [], + "bsd": [], + "esxi": [], + "osx": [], + }, + "extensive": { + "windows": VolatileProfile.DEFAULT, + "linux": VolatileProfile.EXTENSIVE, + "bsd": VolatileProfile.EXTENSIVE, + "esxi": VolatileProfile.EXTENSIVE, + "osx": [], + }, + "none": None, +} + + +def exit_success(default_args: list[str]): + log.info("Acquire finished successful") + log.info("Arguments: %s", " ".join(sys.argv[1:])) + log.info("Default Arguments: %s", " ".join(default_args)) + log.info("Exiting with status code 0 (SUCCESS)") + sys.exit(0) + + +def exit_failure(default_args: list[str]): + log.error("Acquire FAILED") + log.error("Arguments: %s", " ".join(sys.argv[1:])) + log.error("Default Arguments: %s", " ".join(default_args)) + log.error("Exiting with status code 1 (FAILURE)") + sys.exit(1) + + +def main() -> None: + parser = create_argument_parser(PROFILES, VOLATILE, MODULES) + args, rest = parse_acquire_args(parser, config=CONFIG) + + # Since output has a default value, set it to None when output_file is defined + if args.output_file: + args.output = None + + try: + check_and_set_log_args(args) + except ValueError as err: + parser.exit(err) + + if args.log_to_dir: + # When args.upload files are specified, only these files are uploaded + # and no other action is done. 
Thus a log file specifically named + # Upload_.log is created + file_prefix = "Upload" if args.upload else "Unknown" + log_file = args.log_path.joinpath(format_output_name(file_prefix, args.start_time, "log")) + else: + log_file = args.log_path + + setup_logging(log, log_file, args.verbose, delay=args.log_delay) + + acquire_successful = True + files_to_upload = [log_file] + acquire_gui = None + try: + log.info(ACQUIRE_BANNER) + log.info("User: %s | Admin: %s", get_user_name(), is_user_admin()) + log.info("Arguments: %s", " ".join(sys.argv[1:])) + log.info("Default Arguments: %s", " ".join(args.config.get("arguments"))) + log.info("") + + # start GUI if requested through CLI / config + flavour = None + if args.gui == "always" or ( + args.gui == "depends" and os.environ.get("PYS_KEYSOURCE") == "prompt" and len(sys.argv) == 1 + ): + flavour = platform.system() + acquire_gui = GUI(flavour=flavour, upload_available=args.auto_upload) + + args.output, args.auto_upload, cancel = acquire_gui.wait_for_start(args) + if cancel: + log.info("Acquire cancelled") + exit_success(args.config.get("arguments")) + # From here onwards, the GUI will be locked and cannot be closed because we're acquiring + + plugins_to_load = [("cloud", MinIO)] + upload_plugins = UploaderRegistry("acquire.plugins", plugins_to_load) + + check_and_set_acquire_args(args, upload_plugins) + + if args.upload: + try: + upload_files(args.upload, args.upload_plugin, args.no_proxy) + except Exception as err: + acquire_gui.message("Failed to upload files") + log.exception(err) + exit_failure(args.config.get("arguments")) + exit_success(args.config.get("arguments")) + + target_paths = [] + for target_path in args.targets: + target_path = args_to_uri([target_path], args.loader, rest)[0] if args.loader else target_path + if target_path == "local": + target_query = {} + if args.force_fallback: + target_query.update({"force-directory-fs": 1}) + + if args.fallback: + target_query.update({"fallback-to-directory-fs": 1}) + 
+ target_query = urllib.parse.urlencode(target_query) + target_path = f"{target_path}?{target_query}" + target_paths.append(target_path) + + try: + target_name = "Unknown" # just in case open_all already fails + for target in Target.open_all(target_paths): + target_name = "Unknown" # overwrite previous target name + target_name = target.name + log.info("Loading target %s", target_name) + log.info(target) + if target.os == "esxi" and target.name == "local": + # Loader found that we are running on an esxi host + # Perform operations to "enhance" memory + with esxi_memory_context_manager(): + files_to_upload = acquire_children_and_targets(target, args) + else: + files_to_upload = acquire_children_and_targets(target, args) + except Exception: + log.error("Failed to acquire target: %s", target_name) + if not is_user_admin(): + log.error("Try re-running as administrator/root") + acquire_gui.message("This application must be run as administrator.") + raise + + files_to_upload = sort_files(files_to_upload) + + except Exception as err: + log.error("Acquiring artifacts FAILED") + log.exception(err) + acquire_successful = False + else: + log.info("Acquiring artifacts succeeded") + + try: + # The auto-upload of files is done at the very very end to make sure any + # logged exceptions are written to the log file before uploading. + # This means that any failures from this point on will not be part of the + # uploaded log files, they will be written to the logfile on disk though. 
+ if args.auto_upload and args.upload_plugin and files_to_upload: + try: + log_file_handler = get_file_handler(log) + if log_file_handler: + log_file_handler.close() + + upload_files(files_to_upload, args.upload_plugin) + except Exception: + if acquire_gui: + acquire_gui.message("Failed to upload files") + raise + + if acquire_gui: + acquire_gui.finish() + acquire_gui.wait_for_quit() + + except Exception as err: + acquire_successful = False + log.exception(err) + + if acquire_successful: + exit_success(args.config.get("arguments")) + else: + exit_failure(args.config.get("arguments")) + + +def load_child(target: Target, child_path: Path) -> None: + log.info("") + log.info("Loading child target %s", child_path) + try: + child = target.open_child(child_path) + log.info(target) + except Exception: + log.exception("Failed to load child target") + raise + + return child + + +def acquire_children_and_targets(target: Target, args: argparse.Namespace) -> list[str | Path]: + if args.child: + target = load_child(target, args.child) + + log.info("") + + files = [] + acquire_gui = GUI() + + counter = 0 + progress_limit = 50 if args.auto_upload else 90 + total_targets = 0 + if args.children: + total_targets += len(list(target.list_children())) + + if (args.children and not args.skip_parent) or not args.children: + total_targets += 1 + counter += 1 + acquire_gui.shard = int((progress_limit / total_targets) * counter) + try: + files.extend(acquire_target(target, args, args.start_time)) + + except Exception: + log.error("Failed to acquire main target") + acquire_gui.message("Failed to acquire target") + acquire_gui.wait_for_quit() + raise + + if args.children: + for child in target.list_children(): + counter += 1 + acquire_gui.shard = int((progress_limit / total_targets) * counter) + try: + child_target = load_child(target, child.path) + except Exception: + continue + + log.info("") + + try: + child_files = acquire_target(child_target, args) + files.extend(child_files) + except 
Exception: + log.exception("Failed to acquire child target %s", child_target.name) + acquire_gui.message("Failed to acquire child target") + continue + + return files + + +def sort_files(files: list[Union[str, Path]]) -> list[Path]: + log_files: list[Path] = [] + tar_paths: list[Path] = [] + report_paths: list[Path] = [] + + suffix_map = {".log": log_files, ".json": report_paths} + + for file in files: + if isinstance(file, str): + file = Path(file) + + suffix_map.get(file.suffix, tar_paths).append(file) + + # Reverse log paths, as the first one in ``files`` is the main one. + log_files.reverse() + + return tar_paths + report_paths + log_files + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + sys.exit(1) + except Exception: + sys.exit(1) diff --git a/acquire/dynamic/windows/arp.py b/acquire/dynamic/windows/arp.py index b734c05a..3d391490 100644 --- a/acquire/dynamic/windows/arp.py +++ b/acquire/dynamic/windows/arp.py @@ -6,21 +6,17 @@ from acquire.dynamic.windows.iphlpapi import ( ADDRESS_FAMILY, - ERROR_NO_DATA, - ERROR_NOT_SUPPORTED, IF_OPER_STATUS, IF_TYPE, IP_ADAPTER_ADDRESSES, LPVOID, MIB_IPNET_ROW2, MIB_IPNET_TABLE2, - MIB_IPNETROW, - MIB_IPNETTABLE, + NL_NEIGHBOR_STATE, NO_ERROR, ULONG, FreeMibTable, GetAdaptersAddresses, - GetIpNetTable, GetIpNetTable2, ) @@ -100,22 +96,34 @@ def __str__(self) -> str: class NetNeighbor: - def __init__(self, family: ADDRESS_FAMILY, address: str, mac: str | None, adapter: NetAdapter | None): - self.family: ADDRESS_FAMILY = family - self.address: str = address - self.mac: str | None = mac - self.adapter: NetAdapter | None = adapter + def __init__( + self, + family: ADDRESS_FAMILY, + address: str, + mac: str | None, + state: NL_NEIGHBOR_STATE, + adapter: NetAdapter | None + ): + self.family: ADDRESS_FAMILY = family + self.address: str = address + self.mac: str | None = mac + self.state: NL_NEIGHBOR_STATE = state + self.adapter: NetAdapter | None = adapter def as_dict(self) -> dict: return { 'family': 
self.family.name, 'address': self.address, 'mac': self.mac if self.mac else '', + 'state': self.state.name, 'adapter': self.adapter.as_dict() } def __str__(self) -> str: - return f"NetNeighbor(family={self.family.name}, address={self.address}, mac={self.mac}, adapter={self.adapter})" + return ( + f"NetNeighbor(family={self.family.name}, address={self.address}," + f"mac={self.mac}, state={self.state.name}, adapter={self.adapter})" + ) def get_windows_network_adapters() -> list[NetAdapter]: @@ -153,39 +161,6 @@ def get_adapter_by_index(adapters: list[NetAdapter], index: int) -> NetAdapter | return None -def get_windows_arp_cache(adapters: list[NetAdapter]) -> list[NetNeighbor]: - table_buffer_len = ULONG(0) - status = GetIpNetTable(LPVOID(0), ctypes.byref(table_buffer_len), True) - - if status in [ERROR_NO_DATA, ERROR_NOT_SUPPORTED]: - return [] - - buffer = ctypes.create_string_buffer(table_buffer_len.value) - result = GetIpNetTable(buffer, ctypes.byref(table_buffer_len), True) - - if result != NO_ERROR: - return [] - - table = ctypes.cast(buffer, ctypes.POINTER(MIB_IPNETTABLE)).contents - rows = ctypes.cast(table.table, ctypes.POINTER(MIB_IPNETROW * table.dwNumEntries)).contents - - neighbors = [] - - for row in rows: - adapter = get_adapter_by_index(adapters, row.dwIndex) - - entry = NetNeighbor( - family=ADDRESS_FAMILY.AF_INET, - address=inet_ntop(ADDRESS_FAMILY.AF_INET, row.dwAddr), - mac=format_physical_address(row.bPhysAddr, row.dwPhysAddrLen), - adapter=adapter, - ) - - neighbors.append(entry) - - return neighbors - - def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: table_pointer = ctypes.POINTER(MIB_IPNET_TABLE2)() result = GetIpNetTable2(ADDRESS_FAMILY.AF_UNSPEC, ctypes.byref(table_pointer)) @@ -211,7 +186,8 @@ def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: mac = format_physical_address(row.PhysicalAddress, row.PhysicalAddressLength) adapter = get_adapter_by_index(adapters, row.InterfaceIndex) 
- neighbor = NetNeighbor(family=ADDRESS_FAMILY(row.Address.si_family), address=address, mac=mac, adapter=adapter) + neighbor = NetNeighbor(family=ADDRESS_FAMILY(row.Address.si_family), address=address, mac=mac, + state=NL_NEIGHBOR_STATE(row.State), adapter=adapter) neighbors.append(neighbor) FreeMibTable(table_pointer) @@ -221,9 +197,10 @@ def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: def format_net_neighbors_csv(net_neighbors: list[NetNeighbor]) -> str: def formatter(neighbor: NetNeighbor) -> str: - return f",".join([str(neighbor.adapter.index), neighbor.address, neighbor.mac if neighbor.mac else ""]) + return f",".join([str(neighbor.adapter.index), neighbor.address, neighbor.mac if neighbor.mac else "", + neighbor.state.name]) - header = ",".join(["interface_index", "ip_address", "mac"]) + header = ",".join(["interface_index", "ip_address", "mac", "state"]) rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) return f"{header}\n{rows}" @@ -236,9 +213,9 @@ def format_net_neighbors_json(net_neighbors: list[NetNeighbor], indent=0) -> str def format_net_neighbors_list(net_neighbors: list[NetNeighbor]) -> str: def formatter(neighbor: NetNeighbor) -> str: mac = neighbor.mac if neighbor.mac else "" - return f"{neighbor.adapter.index:<10}{neighbor.address:<60}{mac:<20}" + return f"{neighbor.adapter.index:<10}{neighbor.address:<60}{mac:<20}{neighbor.state.name:<20}" - header = f"{'ifIndex':<10}{'IP Address':<60}{'MAC Address':<20}" + header = f"{'ifIndex':<10}{'IP Address':<60}{'MAC Address':<20}{'State':<20}" header += "\n" + ('=' * len(header)) rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) diff --git a/acquire/dynamic/windows/iphlpapi.py b/acquire/dynamic/windows/iphlpapi.py index b37f5afa..eadfa91e 100644 --- a/acquire/dynamic/windows/iphlpapi.py +++ b/acquire/dynamic/windows/iphlpapi.py @@ -369,16 +369,6 @@ class SOCKADDR_INET(ctypes.Union): ] -class MIB_IPNETROW(ctypes.Structure): - _fields_: 
ClassVar[list[tuple[str, type]]] = [ - ("dwIndex", DWORD), - ("dwPhysAddrLen", DWORD), - ("bPhysAddr", ctypes.c_ubyte * MAXLEN_PHYSADDR), - ("dwAddr", ctypes.c_ubyte * 4), - ("dwType", DWORD), - ] - - class MIB_IPNET_ROW2(ctypes.Structure): _fields_: ClassVar[list[tuple[str, type]]] = [ ("Address", SOCKADDR_INET), @@ -401,13 +391,6 @@ class MIB_IPNET_ROW2(ctypes.Structure): _fields_.insert(2, ('Padding', DWORD)) -class MIB_IPNETTABLE(ctypes.Structure): - _fields_: ClassVar[list[tuple[str, type]]] = [ - ("dwNumEntries", DWORD), - ("table", MIB_IPNETROW * 1) - ] - - class MIB_IPNET_TABLE2(ctypes.Structure): _fields_: ClassVar[list[tuple[str, type]]] = [ ("NumEntries", ULONG), @@ -529,7 +512,6 @@ class MIB_UDP6TABLE_OWNER_PID(ctypes.Structure): PULONG = ctypes.POINTER(ULONG) -PMIB_IPNETTABLE = ctypes.POINTER(MIB_IPNETTABLE) PMIB_IPNET_TABLE2 = ctypes.POINTER(MIB_IPNET_TABLE2) PMIB_TCPTABLE_OWNER_PID = ctypes.POINTER(MIB_TCPTABLE_OWNER_PID) PMIB_TCP6TABLE_OWNER_PID = ctypes.POINTER(MIB_TCP6TABLE_OWNER_PID) @@ -539,10 +521,6 @@ class MIB_UDP6TABLE_OWNER_PID(ctypes.Structure): iphlpapi = ctypes.WinDLL("Iphlpapi.dll") # arp calls -GetIpNetTable = iphlpapi.GetIpNetTable -GetIpNetTable.argtypes = [LPVOID, PULONG, BOOL] -GetIpNetTable.restype = ULONG - GetIpNetTable2 = iphlpapi.GetIpNetTable2 GetIpNetTable2.argtypes = [ULONG, ctypes.POINTER(PMIB_IPNET_TABLE2)] GetIpNetTable2.restype = ULONG From 339d523d2a102183f5de9fce06b55b101ad3ccc7 Mon Sep 17 00:00:00 2001 From: lhotlan64 <> Date: Tue, 22 Oct 2024 19:34:57 +0200 Subject: [PATCH 4/5] Correctly commit changes to the acquire file --- acquire/acquire.py | 4628 ++++++++++++++++++++++---------------------- 1 file changed, 2314 insertions(+), 2314 deletions(-) diff --git a/acquire/acquire.py b/acquire/acquire.py index cb664007..caa7b786 100644 --- a/acquire/acquire.py +++ b/acquire/acquire.py @@ -1,2314 +1,2314 @@ -from __future__ import annotations - -import argparse -import enum -import functools -import io -import itertools 
-import logging -import os -import platform -import shutil -import subprocess -import sys -import time -import urllib.parse -import urllib.request -from collections import defaultdict, namedtuple -from itertools import product -from pathlib import Path -from typing import BinaryIO, Callable, Iterator, Optional, Union - -from dissect.target import Target -from dissect.target.filesystem import Filesystem -from dissect.target.filesystems import ntfs -from dissect.target.helpers import fsutil -from dissect.target.plugins.apps.webserver import iis -from dissect.target.plugins.os.windows.log import evt, evtx -from dissect.target.tools.utils import args_to_uri -from dissect.util.stream import RunlistStream - -from acquire.collector import Collector, get_full_formatted_report, get_report_summary -from acquire.dynamic.windows.named_objects import NamedObjectType -from acquire.dynamic.windows.arp import ( - NetAdapter, - NetNeighbor, - get_windows_network_adapters, - get_windows_net_neighbors, - format_net_neighbors_list -) -from acquire.dynamic.windows.netstat import ( - NetConnection, - get_active_connections, - format_net_connections_list -) -from acquire.esxi import esxi_memory_context_manager -from acquire.gui import GUI -from acquire.hashes import ( - HashFunc, - collect_hashes, - filter_out_by_path_match, - filter_out_by_value_match, - filter_out_huge_files, - serialize_into_csv, -) -from acquire.log import get_file_handler, reconfigure_log_file, setup_logging -from acquire.outputs import OUTPUTS -from acquire.uploaders.minio import MinIO -from acquire.uploaders.plugin import UploaderPlugin, upload_files_using_uploader -from acquire.uploaders.plugin_registry import UploaderRegistry -from acquire.utils import ( - check_and_set_acquire_args, - check_and_set_log_args, - create_argument_parser, - format_output_name, - get_formatted_exception, - get_user_name, - get_utc_now, - get_utc_now_str, - is_user_admin, - normalize_path, - parse_acquire_args, - 
persist_execution_report, -) - -try: - from acquire.version import version -except ImportError: - version = "0.0.dev" - -try: - # Injected by pystandalone builder - from acquire.config import CONFIG -except ImportError: - CONFIG = defaultdict(lambda: None) - - -VERSION = version -ACQUIRE_BANNER = r""" - _ - __ _ ___ __ _ _ _(_)_ __ ___ - / _` |/ __/ _` | | | | | '__/ _ \ -| (_| | (_| (_| | |_| | | | | __/ - \__,_|\___\__, |\__,_|_|_| \___| - by Fox-IT |_| v{} - part of NCC Group -""".format( - VERSION -)[ - 1: -] - -MODULES = {} -MODULE_LOOKUP = {} - -CLI_ARGS_MODULE = "cli-args" - -log = logging.getLogger("acquire") -log.propagate = 0 -log_file_handler = None -logging.lastResort = None -logging.raiseExceptions = False - - -def misc_windows_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - misc_dirs = { - ("Windows/ServiceProfiles/LocalService", False), - ("Windows/ServiceProfiles/NetworkService", False), - ("Windows/System32/config/systemprofile", False), - ("Users", True), - ("Documents and Settings", True), - } - - for fs in target.fs.path().iterdir(): - if fs.name.lower() == "c:": - continue - - for misc_dir, get_subdirs in misc_dirs: - misc_path = fs.joinpath(misc_dir) - - if not misc_path.exists(): - continue - - if get_subdirs: - for entry in misc_path.iterdir(): - if entry.is_dir(): - yield entry - else: - yield misc_path - - -def misc_unix_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - user_dirs = ["root", "home/*"] - - home_dirs = (target.fs.path("/").glob(path) for path in user_dirs) - for home_dir in itertools.chain.from_iterable(home_dirs): - yield home_dir - - -def misc_osx_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: - for homedir in itertools.chain(target.fs.path("/Users/").glob("*"), misc_unix_user_homes(target)): - yield homedir - - -MISC_MAPPING = { - "osx": misc_osx_user_homes, - "windows": misc_windows_user_homes, -} - - -def from_user_home(target: Target, path: str) -> Iterator[str]: - try: - for 
user_details in target.user_details.all_with_home(): - yield user_details.home_path.joinpath(path).as_posix() - except Exception as e: - log.warning("Error occurred when requesting all user homes") - log.debug("", exc_info=e) - - misc_user_homes = MISC_MAPPING.get(target.os, misc_unix_user_homes) - for user_dir in misc_user_homes(target): - yield user_dir.joinpath(path).as_posix() - - -def iter_ntfs_filesystems(target: Target) -> Iterator[tuple[ntfs.NtfsFilesystem, Optional[str], str, str]]: - mount_lookup = defaultdict(list) - for mount, fs in target.fs.mounts.items(): - mount_lookup[fs].append(mount) - - for fs in target.filesystems: - # The attr check is needed to correctly collect fake NTFS filesystems - # where the MFT etc. are added to a VirtualFilesystem. This happens for - # instance when the target is an acquired tar target. - if not isinstance(fs, ntfs.NtfsFilesystem) and not hasattr(fs, "ntfs"): - log.warning("Skipping %s - not an NTFS filesystem", fs) - continue - - if fs in mount_lookup: - mountpoints = mount_lookup[fs] - - for main_mountpoint in mountpoints: - if main_mountpoint != "sysvol": - break - - name = main_mountpoint - mountpoints = ", ".join(mountpoints) - else: - main_mountpoint = None - name = f"vol-{fs.ntfs.serial:x}" - mountpoints = "No mounts" - log.warning("Unmounted NTFS filesystem found %s (%s)", fs, name) - - yield fs, main_mountpoint, name, mountpoints - - -def iter_esxi_filesystems(target: Target) -> Iterator[tuple[Filesystem, str, str, Optional[str]]]: - for mount, fs in target.fs.mounts.items(): - if not mount.startswith("/vmfs/volumes/"): - continue - - uuid = mount[len("/vmfs/volumes/") :] # strip /vmfs/volumes/ - name = None - if fs.__type__ == "fat": - name = fs.volume.name - elif fs.__type__ == "vmfs": - name = fs.vmfs.label - - yield fs, mount, uuid, name - - -def register_module(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: - def wrapper(module_cls: type[Module]) -> type[Module]: - name = module_cls.__name__ 
- - if name in MODULES: - raise ValueError( - f"Module name is already registered: registration for {module_cls} conflicts with {MODULES[name]}" - ) - - desc = module_cls.DESC or name - kwargs["help"] = f"acquire {desc}" - kwargs["action"] = argparse.BooleanOptionalAction - kwargs["dest"] = name.lower() - module_cls.__modname__ = name - - if not hasattr(module_cls, "__cli_args__"): - module_cls.__cli_args__ = [] - module_cls.__cli_args__.append((args, kwargs)) - - MODULES[name] = module_cls - return module_cls - - return wrapper - - -def module_arg(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: - def wrapper(module_cls: type[Module]) -> type[Module]: - if not hasattr(module_cls, "__cli_args__"): - module_cls.__cli_args__ = [] - module_cls.__cli_args__.append((args, kwargs)) - return module_cls - - return wrapper - - -def local_module(cls: type[object]) -> object: - """A decorator that sets property `__local__` on a module class to mark it for local target only""" - cls.__local__ = True - return cls - - -class ExecutionOrder(enum.IntEnum): - TOP = 0 - DEFAULT = 1 - BOTTOM = 2 - - -class Module: - DESC = None - SPEC = [] - EXEC_ORDER = ExecutionOrder.DEFAULT - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - desc = cls.DESC or cls.__name__.lower() - log.info("*** Acquiring %s", desc) - - with collector.bind_module(cls): - collector.collect(cls.SPEC) - - spec_ext = cls.get_spec_additions(target, cli_args) - if spec_ext: - collector.collect(list(spec_ext)) - - cls._run(target, cli_args, collector) - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - pass - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - pass - - -@register_module("--sys") -@local_module -class Sys(Module): - DESC = "Sysfs files (live systems only)" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def 
_run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - spec = [("dir", "/sys")] - collector.collect(spec, follow=False, volatile=True) - - -@register_module("--proc") -@local_module -class Proc(Module): - DESC = "Procfs files (live systems only)" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - spec = [("dir", "/proc")] - collector.collect(spec, follow=False, volatile=True) - - -@register_module("-n", "--ntfs") -class NTFS(Module): - DESC = "NTFS filesystem metadata" - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): - log.info("Acquiring from %s as %s (%s)", fs, name, mountpoints) - - for filename in ("$MFT", "$Boot", "$Secure:$SDS"): - if main_mountpoint is not None: - path = fsutil.join(main_mountpoint, filename) - collector.collect_path(path) - - else: - # In case the NTFS filesystem is not mounted, which should not occur but - # iter_ntfs_filesystems allows for the possibility, we fall back to raw file - # collection. - collector.collect_file_raw(filename, fs, name) - - cls.collect_usnjrnl(collector, fs, name) - - @classmethod - def collect_usnjrnl(cls, collector: Collector, fs: Filesystem, name: str) -> None: - def usnjrnl_accessor(journal: BinaryIO) -> tuple[BinaryIO, int]: - # If the filesystem is a virtual NTFS filesystem, journal will be - # plain BinaryIO, not a RunlistStream. 
- if isinstance(journal, RunlistStream): - i = 0 - while journal.runlist[i][0] is None: - journal.seek(journal.runlist[i][1] * journal.block_size, io.SEEK_CUR) - i += 1 - size = journal.size - journal.tell() - else: - size = journal.size - - return (journal, size) - - collector.collect_file_raw( - "$Extend/$Usnjrnl:$J", - fs, - name, - file_accessor=usnjrnl_accessor, - ) - - -@register_module("-r", "--registry") -class Registry(Module): - DESC = "registry hives" - HIVES = ["drivers", "sam", "security", "software", "system", "default"] - SPEC = [ - ("dir", "sysvol/windows/system32/config/txr"), - ("dir", "sysvol/windows/system32/config/regback"), - ("glob", "sysvol/System Volume Information/_restore*/RP*/snapshot/_REGISTRY_*"), - ("glob", "ntuser.dat*", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/UsrClass.dat*", from_user_home), - ("glob", "Local Settings/Application Data/Microsoft/Windows/UsrClass.dat*", from_user_home), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - # Glob all hives to include e.g. .LOG files and .regtrans-ms files. 
- files = [] - for hive in cls.HIVES: - pattern = "sysvol/windows/system32/config/{}*".format(hive) - for entry in target.fs.path().glob(pattern): - if entry.is_file(): - files.append(("file", entry)) - return files - - -@register_module("--netstat") -@local_module -class Netstat(Module): - DESC = "Windows network connections" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - net_connections: list[NetConnection] = get_active_connections() - output = format_net_connections_list(net_connections) - - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE - full_output_path = fsutil.join(output_base, "netstat") - - collector.output.write_bytes(full_output_path, output.encode()) - collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"]) - - -@register_module("--win-processes") -@local_module -class WinProcesses(Module): - DESC = "Windows process list" - SPEC = [ - ("command", (["tasklist", "/V", "/fo", "csv"], "win-processes")), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--win-proc-env") -@local_module -class WinProcEnv(Module): - DESC = "Process environment variables" - SPEC = [ - ( - "command", - ( - ["PowerShell", "-command", "Get-Process | ForEach-Object {$_.StartInfo.EnvironmentVariables}"], - "win-process-env-vars", - ), - ), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--win-arp-cache") -@local_module -class WinArpCache(Module): - DESC = "ARP Cache" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - network_adapters: list[NetAdapter] = get_windows_network_adapters() - neighbors = get_windows_net_neighbors(network_adapters) - output = format_net_neighbors_list(neighbors) - - output_base = fsutil.join(collector.base, 
collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE - full_output_path = fsutil.join(output_base, "arp-cache") - - collector.output.write_bytes(full_output_path, output.encode()) - collector.report.add_command_collected(cls.__name__, ["arp-cache"]) - - -@register_module("--win-rdp-sessions") -@local_module -class WinRDPSessions(Module): - DESC = "Windows Remote Desktop session information" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - # where.exe instead of where, just in case the client runs in PS instead of CMD - # by default where hides qwinsta on 32-bit systems because qwinsta is only 64-bit, but with recursive /R search - # we can still manage to find it and by passing the exact path Windows will launch a 64-bit process - # on systems capable of doing that. - qwinsta = subprocess.run( - ["where.exe", "/R", os.environ["WINDIR"], "qwinsta.exe"], capture_output=True, text=True - ).stdout.split("\n")[0] - return [ - ("command", ([qwinsta, "/VM"], "win-rdp-sessions")), - ] - - -@register_module("--winpmem") -@local_module -class WinMemDump(Module): - DESC = "Windows full memory dump" - EXEC_ORDER = ExecutionOrder.BOTTOM - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - winpmem_file_name = "winpmem.exe" - winpmem_exec = shutil.which(winpmem_file_name) - - command_parts = [winpmem_exec, "-"] - - if winpmem_exec is None: - command_parts.pop(0) - command_parts.insert(0, winpmem_file_name) - collector.report.add_command_failed(cls.__name__, command_parts) - log.error( - "- Failed to collect output from command `%s`, program %s not found", - " ".join(command_parts), - winpmem_file_name, - ) - return - - else: - log.info("- Collecting output from command `%s`", " ".join(command_parts)) - - mem_dump_path = collector.output.path.with_name("winpmem") - mem_dump_errors_path = 
mem_dump_path.with_name("winpmem.errors") - - output_base = collector.COMMAND_OUTPUT_BASE - if collector.base: - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) - - mem_dump_output_path = fsutil.join(output_base, mem_dump_path.name) - mem_dump_errors_output_path = fsutil.join(output_base, mem_dump_errors_path.name) - - with mem_dump_path.open(mode="wb") as mem_dump_fh: - with mem_dump_errors_path.open(mode="wb") as mem_dump_errors_fh: - try: - # The shell parameter must be set to False, as otherwise the - # output from stdout is not piped into the filehandle. - # The check parameter must be set to False, as winpmem.exe - # always seems to exit with an error code, even on success. - subprocess.run( - bufsize=0, - args=command_parts, - stdout=mem_dump_fh, - stderr=mem_dump_errors_fh, - shell=False, - check=False, - ) - - except Exception: - collector.report.add_command_failed(cls.__name__, command_parts) - log.error( - "- Failed to collect output from command `%s`", - " ".join(command_parts), - exc_info=True, - ) - return - - collector.output.write_entry(mem_dump_output_path, mem_dump_path) - collector.output.write_entry(mem_dump_errors_output_path, mem_dump_errors_path) - collector.report.add_command_collected(cls.__name__, command_parts) - mem_dump_path.unlink() - mem_dump_errors_path.unlink() - - -@register_module("--winmem-files") -class WinMemFiles(Module): - DESC = "Windows memory files" - SPEC = [ - ("file", "sysvol/pagefile.sys"), - ("file", "sysvol/hiberfil.sys"), - ("file", "sysvol/swapfile.sys"), - ("file", "sysvol/windows/memory.dmp"), - ("dir", "sysvol/windows/minidump"), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - page_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Memory Management" - for reg_key in target.registry.iterkeys(page_key): - for page_path in reg_key.value("ExistingPageFiles").value: - spec.add(("file", 
target.resolve(page_path))) - - crash_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\CrashControl" - for reg_key in target.registry.iterkeys(crash_key): - spec.add(("file", target.resolve(reg_key.value("DumpFile").value))) - spec.add(("dir", target.resolve(reg_key.value("MinidumpDir").value))) - - return spec - - -@register_module("-e", "--eventlogs") -class EventLogs(Module): - DESC = "event logs" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - evt_log_paths = evt.EvtPlugin(target).get_logs(filename_glob="*.evt") - for path in evt_log_paths: - spec.add(("file", path)) - evtx_log_paths = evtx.EvtxPlugin(target).get_logs(filename_glob="*.evtx") - for path in evtx_log_paths: - spec.add(("file", path)) - return spec - - -@register_module("-t", "--tasks") -class Tasks(Module): - SPEC = [ - ("dir", "sysvol/windows/tasks"), - ("dir", "sysvol/windows/system32/tasks"), - ("dir", "sysvol/windows/syswow64/tasks"), - ("dir", "sysvol/windows/sysvol/domain/policies"), - ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), - # Task Scheduler Service transaction log - ("file", "sysvol/SchedLgU.txt"), - ("file", "sysvol/windows/SchedLgU.txt"), - ("file", "sysvol/windows/tasks/SchedLgU.txt"), - ("file", "sysvol/winnt/tasks/SchedLgU.txt"), - ] - - -@register_module("-ad", "--active-directory") -class ActiveDirectory(Module): - DESC = "Active Directory data (policies, scripts, etc.)" - SPEC = [ - ("dir", "sysvol/windows/sysvol/domain"), - ] - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters" - for reg_key in target.registry.iterkeys(key): - try: - spec.add(("dir", reg_key.value("SysVol").value)) - except Exception: - pass - return spec - - -@register_module("-nt", "--ntds") -class NTDS(Module): - SPEC = [ - ("dir", "sysvol/windows/NTDS"), - ] - - 
@classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - key = "HKLM\\SYSTEM\\CurrentControlSet\\services\\NTDS\\Parameters" - values = [ - ("dir", "DSA Working Directory"), - ("file", "DSA Database File"), - ("file", "Database backup path"), - ("dir", "Database log files path"), - ] - for reg_key in target.registry.iterkeys(key): - for collect_type, value in values: - path = reg_key.value(value).value - spec.add((collect_type, path)) - - return spec - - -@register_module("--etl") -class ETL(Module): - DESC = "interesting ETL files" - SPEC = [ - ("glob", "sysvol/Windows/System32/WDI/LogFiles/*.etl"), - ] - - -@register_module("--recents") -class Recents(Module): - DESC = "Windows recently used files artifacts" - SPEC = [ - ("dir", "AppData/Roaming/Microsoft/Windows/Recent", from_user_home), - ("dir", "AppData/Roaming/Microsoft/Office/Recent", from_user_home), - ("glob", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/*.lnk", from_user_home), - ("glob", "Desktop/*.lnk", from_user_home), - ("glob", "Recent/*.lnk", from_user_home), - ("glob", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/*.lnk"), - ] - - -@register_module("--startup") -class Startup(Module): - DESC = "Windows Startup folder" - SPEC = [ - ("dir", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/Startup"), - ("dir", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup", from_user_home), - ] - - -def recyclebin_filter(path: fsutil.TargetPath) -> bool: - return bool(path.stat().st_size >= (10 * 1024 * 1024)) # 10MB - - -@register_module("--recyclebin") -@module_arg( - "--large-files", - action=argparse.BooleanOptionalAction, - help="Collect files larger than 10MB in the Recycle Bin", -) -@module_arg( - "--data-files", - action=argparse.BooleanOptionalAction, - help="Collect the data files in the Recycle Bin", -) -class RecycleBin(Module): - DESC = "recycle bin metadata and data files" - - 
@classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - large_files_filter = None if cli_args.large_files else recyclebin_filter - - if large_files_filter: - log.info("Skipping files in Recycle Bin that are larger than 10MB.") - - patterns = ["$Recycle.bin/*/$I*", "Recycler/*/INFO2", "Recycled/INFO2"] - - if cli_args.data_files is None or cli_args.data_files: - patterns.extend(["$Recycle.Bin/$R*", "$Recycle.Bin/*/$R*", "RECYCLE*/D*"]) - - with collector.file_filter(large_files_filter): - for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): - log.info("Acquiring recycle bin from %s as %s (%s)", fs, name, mountpoints) - - for pattern in patterns: - if main_mountpoint is not None: - pattern = fsutil.join(main_mountpoint, pattern) - collector.collect_glob(pattern) - else: - # In case the NTFS filesystem is not mounted, which should not occur but - # iter_ntfs_filesystems allows for the possibility, we fall back to raw file - # collection. 
- for entry in fs.path().glob(pattern): - if entry.is_file(): - collector.collect_file_raw(fs, entry, name) - - -@register_module("--drivers") -class Drivers(Module): - DESC = "installed drivers" - SPEC = [ - ("glob", "sysvol/windows/system32/drivers/*.sys"), - ] - - -@register_module("--exchange") -class Exchange(Module): - DESC = "interesting Exchange configuration files" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - key = "HKLM\\SOFTWARE\\Microsoft\\ExchangeServer" - for reg_key in target.registry.iterkeys(key): - for subkey in reg_key.subkeys(): - try: - setup_key = subkey.subkey("Setup") - install_path = setup_key.value("MsiInstallPath").value - spec.update( - [ - ( - "file", - f"{install_path}\\TransportRoles\\Agents\\agents.config", - ), - ( - "dir", - f"{install_path}\\Logging\\Ews", - ), - ( - "dir", - f"{install_path}\\Logging\\CmdletInfra\\Powershell-Proxy\\Cmdlet", - ), - ( - "dir", - f"{install_path}\\TransportRoles\\Logs", - ), - ] - ) - except Exception: - pass - return spec - - -@register_module("--iis") -class IIS(Module): - DESC = "IIS logs" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set( - [ - ("glob", "sysvol\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\Windows.old\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\inetpub\\logs\\LogFiles\\*.log"), - ("glob", "sysvol\\inetpub\\logs\\LogFiles\\W3SVC*\\*.log"), - ("glob", "sysvol\\Resources\\Directory\\*\\LogFiles\\Web\\W3SVC*\\*.log"), - ] - ) - iis_plugin = iis.IISLogsPlugin(target) - spec.update([("file", log_path) for _, log_path in iis_plugin.iter_log_format_path_pairs()]) - return spec - - -@register_module("--prefetch") -class Prefetch(Module): - DESC = "Windows Prefetch files" - SPEC = [ - ("dir", "sysvol/windows/prefetch"), - ] - - -@register_module("--appcompat") -class Appcompat(Module): 
- DESC = "Windows Amcache and RecentFileCache" - SPEC = [ - ("dir", "sysvol/windows/appcompat"), - ] - - -@register_module("--pca") -class PCA(Module): - DESC = "Windows Program Compatibility Assistant" - SPEC = [ - ("dir", "sysvol/windows/pca"), - ] - - -@register_module("--syscache") -class Syscache(Module): - DESC = "Windows Syscache hive and log files" - SPEC = [ - ("file", "sysvol/System Volume Information/Syscache.hve"), - ("glob", "sysvol/System Volume Information/Syscache.hve.LOG*"), - ] - - -@register_module("--win-notifications") -class WindowsNotifications(Module): - DESC = "Windows Push Notifications Database files." - SPEC = [ - # Old Win7/Win10 version of the file - ("file", "AppData/Local/Microsoft/Windows/Notifications/appdb.dat", from_user_home), - # New version of the file - ("file", "AppData/Local/Microsoft/Windows/Notifications/wpndatabase.db", from_user_home), - ] - - -@register_module("--bits") -class BITS(Module): - DESC = "Background Intelligent Transfer Service (BITS) queue/log DB" - SPEC = [ - # Pre-Win10 the BITS DB files are called qmgr[01].dat, in Win10 it is - # called qmgr.db and its transaction logs edb.log and edb.log[0-2] - # Win 2000/XP/2003 path - # (basically: \%ALLUSERSPROFILE%\Application Data\Microsoft\...) 
- ("glob", "sysvol/Documents and Settings/All Users/Application Data/Microsoft/Network/Downloader/qmgr*.dat"), - # Win Vista and higher path - # (basically: \%ALLUSERSPROFILE%\Microsoft\...; %ALLUSERSPROFILE% == %PROGRAMDATA%) - ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr*.dat"), - # Win 10 files - ("file", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr.db"), - ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/edb.log*"), - ] - - -@register_module("--wbem") -class WBEM(Module): - DESC = "Windows WBEM (WMI) database files" - SPEC = [ - ("dir", "sysvol/windows/system32/wbem/Repository"), - ] - - -@register_module("--dhcp") -class DHCP(Module): - DESC = "Windows Server DHCP files" - - @classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\DhcpServer\\Parameters" - for reg_key in target.registry.iterkeys(key): - spec.add(("dir", reg_key.value("DatabasePath").value)) - return spec - - -@register_module("--dns") -class DNS(Module): - DESC = "Windows Server DNS files" - SPEC = [ - ("glob", "sysvol/windows/system32/config/netlogon.*"), - ("dir", "sysvol/windows/system32/dns"), - ] - - -@register_module("--win-dns-cache") -@local_module -class WinDnsClientCache(Module): - DESC = "The contents of Windows DNS client cache" - SPEC = [ - ( - "command", - # Powershell.exe understands a subcommand passed as single string parameter, - # no need to split the subcommand in parts. 
- ( - ["powershell.exe", "-Command", "Get-DnsClientCache | ConvertTo-Csv -NoTypeInformation"], - "get-dnsclientcache", - ), - ), - ] - EXEC_ORDER = ExecutionOrder.BOTTOM - - -@register_module("--powershell") -class PowerShell(Module): - DESC = "Windows PowerShell Artefacts" - SPEC = [ - ("dir", "AppData/Roaming/Microsoft/Windows/PowerShell", from_user_home), - ] - - -@register_module("--thumbnail-cache") -class ThumbnailCache(Module): - DESC = "Windows thumbnail db artifacts" - SPEC = [ - ("glob", "AppData/Local/Microsoft/Windows/Explorer/thumbcache_*", from_user_home), - ] - - -@register_module("--text-editor") -class TextEditor(Module): - DESC = "text editor (un)saved tab contents" - # Only Windows 11 notepad & Notepad++ tabs for now, but locations for other text editors may be added later. - SPEC = [ - ("dir", "AppData/Local/Packages/Microsoft.WindowsNotepad_8wekyb3d8bbwe/LocalState/TabState/", from_user_home), - ("dir", "AppData/Roaming/Notepad++/backup/", from_user_home), - ] - - -@register_module("--misc") -class Misc(Module): - DESC = "miscellaneous Windows artefacts" - SPEC = [ - ("file", "sysvol/windows/PFRO.log"), - ("file", "sysvol/windows/setupapi.log"), - ("file", "sysvol/windows/setupapidev.log"), - ("glob", "sysvol/windows/inf/setupapi*.log"), - ("glob", "sysvol/system32/logfiles/*/*.txt"), - ("dir", "sysvol/windows/system32/sru"), - ("dir", "sysvol/windows/system32/drivers/etc"), - ("dir", "sysvol/Windows/System32/WDI/LogFiles/StartupInfo"), - ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), - ("dir", "sysvol/ProgramData/Microsoft/Group Policy/History/"), - ("dir", "AppData/Local/Microsoft/Group Policy/History/", from_user_home), - ("glob", "sysvol/Windows/System32/LogFiles/SUM/*.mdb"), - ("glob", "sysvol/ProgramData/USOShared/Logs/System/*.etl"), - ("glob", "sysvol/Windows/Logs/WindowsUpdate/WindowsUpdate*.etl"), - ("glob", "sysvol/Windows/Logs/CBS/CBS*.log"), - ("dir", "sysvol/ProgramData/Microsoft/Search/Data/Applications/Windows"), - 
("dir", "sysvol/Windows/SoftwareDistribution/DataStore"), - ] - - -@register_module("--av") -class AV(Module): - DESC = "various antivirus logs" - SPEC = [ - # AVG - ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/log"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/report"), - ("dir", "sysvol/ProgramData/AVG/Antivirus/log"), - ("dir", "sysvol/ProgramData/AVG/Antivirus/report"), - # Avast - ("dir", "sysvol/Documents And Settings/All Users/Application Data/Avast Software/Avast/Log"), - ("dir", "sysvol/ProgramData/Avast Software/Avast/Log"), - ("dir", "Avast Software/Avast/Log", from_user_home), - ("file", "sysvol/ProgramData/Avast Software/Avast/Chest/index.xml"), - # Avira - ("dir", "sysvol/ProgramData/Avira/Antivirus/LOGFILES"), - ("dir", "sysvol/ProgramData/Avira/Security/Logs"), - ("dir", "sysvol/ProgramData/Avira/VPN"), - # Bitdefender - ("dir", "sysvol/ProgramData/Bitdefender/Endpoint Security/Logs"), - ("dir", "sysvol/ProgramData/Bitdefender/Desktop/Profiles/Logs"), - ("glob", "sysvol/Program Files*/Bitdefender*/*"), - # ComboFix - ("file", "sysvol/ComboFix.txt"), - # Cybereason - ("dir", "sysvol/ProgramData/crs1/Logs"), - ("dir", "sysvol/ProgramData/apv2/Logs"), - ("dir", "sysvol/ProgramData/crb1/Logs"), - # Cylance - ("dir", "sysvol/ProgramData/Cylance/Desktop"), - ("dir", "sysvol/ProgramData/Cylance/Optics/Log"), - ("dir", "sysvol/Program Files/Cylance/Desktop/log"), - # ESET - ("dir", "sysvol/Documents and Settings/All Users/Application Data/ESET/ESET NOD32 Antivirus/Logs"), - ("dir", "sysvol/ProgramData/ESET/ESET NOD32 Antivirus/Logs"), - ("dir", "sysvol/ProgramData/ESET/ESET Security/Logs"), - ("dir", "sysvol/ProgramData/ESET/RemoteAdministrator/Agent/EraAgentApplicationData/Logs"), - ("dir", "sysvol/Windows/System32/config/systemprofile/AppData/Local/ESET/ESET Security/Quarantine"), - # Emsisoft - ("glob", "sysvol/ProgramData/Emsisoft/Reports/scan*.txt"), - # F-Secure - ("dir", 
"sysvol/ProgramData/F-Secure/Log"), - ("dir", "AppData/Local/F-Secure/Log", from_user_home), - ("dir", "sysvol/ProgramData/F-Secure/Antivirus/ScheduledScanReports"), - # HitmanPro - ("dir", "sysvol/ProgramData/HitmanPro/Logs"), - ("dir", "sysvol/ProgramData/HitmanPro.Alert/Logs"), - ("file", "sysvol/ProgramData/HitmanPro.Alert/excalibur.db"), - ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), - # Malwarebytes - ("glob", "sysvol/ProgramData/Malwarebytes/Malwarebytes Anti-Malware/Logs/mbam-log-*.xml"), - ("glob", "sysvol/ProgramData/Malwarebytes/MBAMService/logs/mbamservice.log*"), - ("dir", "AppData/Roaming/Malwarebytes/Malwarebytes Anti-Malware/Logs", from_user_home), - ("dir", "sysvol/ProgramData/Malwarebytes/MBAMService/ScanResults"), - # McAfee - ("dir", "Application Data/McAfee/DesktopProtection", from_user_home), - ("dir", "sysvol/ProgramData/McAfee/DesktopProtection"), - ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs"), - ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs_Old"), - ("dir", "sysvol/ProgramData/Mcafee/VirusScan"), - ("dir", "sysvol/ProgramData/McAfee/MSC/Logs"), - ("dir", "sysvol/ProgramData/McAfee/Agent/AgentEvents"), - ("dir", "sysvol/ProgramData/McAfee/Agent/logs"), - ("dir", "sysvol/ProgramData/McAfee/datreputation/Logs"), - ("dir", "sysvol/ProgramData/Mcafee/Managed/VirusScan/Logs"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Common Framework/AgentEvents"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/MCLOGS/SAE"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/datreputation/Logs"), - ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Managed/VirusScan/Logs"), - ("dir", "sysvol/Program Files (x86)/McAfee/DLP/WCF Service/Log"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Apache2/Logs"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/DB/Events"), - ("dir", "sysvol/Program 
Files (x86)/McAfee/ePolicy Orchestrator/DB/Events/Debug"), - ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Server/Logs"), - # RogueKiller - ("glob", "sysvol/ProgramData/RogueKiller/logs/AdliceReport_*.json"), - # SUPERAntiSpyware - ("dir", "AppData/Roaming/SUPERAntiSpyware/Logs", from_user_home), - # SecureAge - ("dir", "sysvol/ProgramData/SecureAge Technology/SecureAge/log"), - # SentinelOne - ("dir", "sysvol/programdata/sentinel/logs"), - # Sophos - ("glob", "sysvol/Documents and Settings/All Users/Application Data/Sophos/Sophos */Logs"), - ("glob", "sysvol/ProgramData/Sophos/Sophos */Logs"), - # Symantec - ( - "dir", - "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Logs/AV", - ), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Logs"), - ("dir", "AppData/Local/Symantec/Symantec Endpoint Protection/Logs", from_user_home), - ("dir", "sysvol/Windows/System32/winevt/logs/Symantec Endpoint Protection Client.evtx"), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/CmnClnt/ccSubSDK"), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/registrationInfo.xml"), - # TotalAV - ("glob", "sysvol/Program Files*/TotalAV/logs"), - ("dir", "sysvol/ProgramData/TotalAV/logs"), - # Trendmicro - ("glob", "sysvol/Program Files*/Trend Micro"), - # VIPRE - ("dir", "sysvol/ProgramData/VIPRE Business Agent/Logs"), - ("dir", "AppData/Roaming/VIPRE Business", from_user_home), - ("dir", "AppData/Roaming/GFI Software/AntiMalware/Logs", from_user_home), - ("dir", "AppData/Roaming/Sunbelt Software/AntiMalware/Logs", from_user_home), - # Webroot - ("file", "sysvol/ProgramData/WRData/WRLog.log"), - # Microsoft Windows Defender - ("dir", "sysvol/ProgramData/Microsoft/Microsoft AntiMalware/Support"), - ("glob", "sysvol/Windows/System32/winevt/Logs/Microsoft-Windows-Windows Defender*.evtx"), - ("dir", "sysvol/ProgramData/Microsoft/Windows 
Defender/Support"), - ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Scans/History/Service/DetectionHistory"), - ("file", "sysvol/Windows/Temp/MpCmdRun.log"), - ("file", "sysvol/Windows.old/Windows/Temp/MpCmdRun.log"), - ] - - -@register_module("--quarantined") -class QuarantinedFiles(Module): - DESC = "files quarantined by various antivirus products" - SPEC = [ - # Microsoft Defender - # https://knez.github.io/posts/how-to-extract-quarantine-files-from-windows-defender/ - ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Quarantine"), - # Symantec Endpoint Protection - ( - "dir", - "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Quarantine", - ), - ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Quarantine"), - # Trend Micro - # https://secret.inf.ufpr.br/papers/marcus_av_handson.pdf - ("dir", "sysvol/ProgramData/Trend Micro/AMSP/quarantine"), - # McAfee - ("dir", "sysvol/Quarantine"), - ("dir", "sysvol/ProgramData/McAfee/VirusScan/Quarantine"), - # Sophos - ("glob", "sysvol/ProgramData/Sophos/Sophos/*/Quarantine"), - ("glob", "sysvol/ProgramData/Sophos/Sophos */INFECTED"), - ("dir", "sysvol/ProgramData/Sophos/Safestore"), - # HitmanPRO - ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), - ] - - -@register_module("--edr") -class EDR(Module): - DESC = "various Endpoint Detection and Response (EDR) logs" - SPEC = [ - # Carbon Black - ("dir", "sysvol/ProgramData/CarbonBlack/Logs"), - ] - - -@register_module("--history") -class History(Module): - DESC = "browser history from IE, Edge, Firefox, and Chrome" - DIR_COMBINATIONS = namedtuple("DirCombinations", ["root_dirs", "dir_extensions", "history_files"]) - COMMON_DIR_COMBINATIONS = [ - DIR_COMBINATIONS( - [ - # Chromium - RHEL/Ubuntu - DNF/apt - ".config/chromium", - # Chrome - RHEL/Ubuntu - DNF - ".config/google-chrome", - # Edge - RHEL/Ubuntu - DNF/apt - ".config/microsoft-edge", - # Chrome - RHEL/Ubuntu - Flatpak - 
".var/app/com.google.Chrome/config/google-chrome", - # Edge - RHEL/Ubuntu - Flatpak - ".var/app/com.microsoft.Edge/config/microsoft-edge", - # Chromium - RHEL/Ubuntu - Flatpak - ".var/app/org.chromium.Chromium/config/chromium", - # Chrome - "AppData/Local/Google/Chrom*/User Data", - # Edge - "AppData/Local/Microsoft/Edge/User Data", - "Library/Application Support/Microsoft Edge", - "Local Settings/Application Data/Microsoft/Edge/User Data", - # Chrome - Legacy - "Library/Application Support/Chromium", - "Library/Application Support/Google/Chrome", - "Local Settings/Application Data/Google/Chrom*/User Data", - # Chromium - RHEL/Ubuntu - snap - "snap/chromium/common/chromium", - # Brave - Windows - "AppData/Local/BraveSoftware/Brave-Browser/User Data", - "AppData/Roaming/BraveSoftware/Brave-Browser/User Data", - # Brave - Linux - ".config/BraveSoftware", - # Brave - MacOS - "Library/Application Support/BraveSoftware", - ], - ["*", "Snapshots/*/*"], - [ - "Archived History", - "Bookmarks", - "Cookies*", - "Network", - "Current Session", - "Current Tabs", - "Extension Cookies", - "Favicons", - "History", - "Last Session", - "Last Tabs", - "Login Data", - "Login Data For Account", - "Media History", - "Shortcuts", - "Snapshots", - "Top Sites", - "Web Data", - ], - ), - ] - - SPEC = [ - # IE - ("dir", "AppData/Local/Microsoft/Internet Explorer/Recovery", from_user_home), - ("dir", "AppData/Local/Microsoft/Windows/INetCookies", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/WebCache/*.dat", from_user_home), - # IE - index.dat - ("file", "Cookies/index.dat", from_user_home), - ("file", "Local Settings/History/History.IE5/index.dat", from_user_home), - ("glob", "Local Settings/History/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "Local Settings/Temporary Internet Files/Content.IE5/index.dat", from_user_home), - ("file", "Local Settings/Application Data/Microsoft/Feeds Cache/index.dat", from_user_home), - ("file", 
"AppData/Local/Microsoft/Windows/History/History.IE5/index.dat", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/History/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/index.dat", from_user_home), - ("glob", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/MSHist*/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Content.IE5/index.dat", from_user_home), - ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Low/Content.IE5/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/Cookies/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/Cookies/Low/index.dat", from_user_home), - ("file", "AppData/Roaming/Microsoft/Windows/IEDownloadHistory/index.dat", from_user_home), - # Firefox - Windows - ("glob", "AppData/Local/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - ("glob", "AppData/Roaming/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - ("glob", "Application Data/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), - # Firefox - macOS - ("glob", "/Users/*/Library/Application Support/Firefox/Profiles/*/*.sqlite*"), - # Firefox - RHEL/Ubuntu - Flatpak - ("glob", ".var/app/org.mozilla.firefox/.mozilla/firefox/*/*.sqlite*", from_user_home), - # Firefox - RHEL/Ubuntu - DNF/apt - ("glob", ".mozilla/firefox/*/*.sqlite*", from_user_home), - # Firefox - RHEL/Ubuntu - snap - ("glob", "snap/firefox/common/.mozilla/firefox/*/*.sqlite*", from_user_home), - # Safari - macOS - ("file", "Library/Safari/Bookmarks.plist", from_user_home), - ("file", "Library/Safari/Downloads.plist", from_user_home), - ("file", "Library/Safari/Extensions/Extensions.plist", from_user_home), - ("glob", "Library/Safari/History.*", from_user_home), - ("file", "Library/Safari/LastSession.plist", from_user_home), - ("file", "Library/Caches/com.apple.Safari/Cache.db", from_user_home), - ] - - 
@classmethod - def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - for root_dirs, extension_dirs, history_files in cls.COMMON_DIR_COMBINATIONS: - for root_dir, extension_dir, history_file in product(root_dirs, extension_dirs, history_files): - full_path = f"{root_dir}/{extension_dir}/{history_file}" - search_type = "glob" if "*" in full_path else "file" - - spec.add((search_type, full_path, from_user_home)) - - return spec - - -@register_module("--remoteaccess") -class RemoteAccess(Module): - DESC = "common remote access tools' log files" - SPEC = [ - # teamviewer - ("glob", "sysvol/Program Files/TeamViewer/*.log"), - ("glob", "sysvol/Program Files (x86)/TeamViewer/*.log"), - ("glob", "/var/log/teamviewer*/*.log"), - ("glob", "AppData/Roaming/TeamViewer/*.log", from_user_home), - ("glob", "Library/Logs/TeamViewer/*.log", from_user_home), - # anydesk - Windows - ("dir", "sysvol/ProgramData/AnyDesk"), - ("glob", "AppData/Roaming/AnyDesk/*.trace", from_user_home), - ("glob", "AppData/Roaming/AnyDesk/*/*.trace", from_user_home), - # anydesk - Mac + Linux - ("glob", ".anydesk*/*.trace", from_user_home), - ("file", "/var/log/anydesk.trace"), - # zoho - ("dir", "sysvol/ProgramData/ZohoMeeting/log"), - ("dir", "AppData/Local/ZohoMeeting/log", from_user_home), - # realvnc - ("file", "sysvol/ProgramData/RealVNC-Service/vncserver.log"), - ("file", "AppData/Local/RealVNC/vncserver.log", from_user_home), - # tightvnc - ("dir", "sysvol/ProgramData/TightVNC/Server/Logs"), - # Remote desktop cache files - ("dir", "AppData/Local/Microsoft/Terminal Server Client/Cache", from_user_home), - ] - - -@register_module("--webhosting") -class WebHosting(Module): - DESC = "Web hosting software log files" - SPEC = [ - # cPanel - ("dir", "/usr/local/cpanel/logs"), - ("file", ".lastlogin", from_user_home), - ] - - -@register_module("--wer") -class WER(Module): - DESC = "WER (Windows Error Reporting) related files" - - @classmethod - def 
get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: - spec = set() - - for wer_dir in itertools.chain( - ["sysvol/ProgramData/Microsoft/Windows/WER"], - from_user_home(target, "AppData/Local/Microsoft/Windows/WER"), - ): - for path in target.fs.path(wer_dir).rglob("*"): - if not path.is_file(): - continue - - if path.stat().st_size >= (1024 * 1024 * 1024): # 1GB - log.debug("Skipping WER file because it exceeds 1GB: %s", path) - continue - - spec.add(("file", path)) - - return spec - - -@register_module("--etc") -class Etc(Module): - SPEC = [ - # In OS-X /etc is a symlink to /private/etc. To prevent collecting - # duplicates, we only use the /etc directory here. - ("dir", "/etc"), - ("dir", "/usr/local/etc"), - ] - - -@register_module("--boot") -class Boot(Module): - SPEC = [ - ("glob", "/boot/config*"), - ("glob", "/boot/efi*"), - ("glob", "/boot/grub*"), - ("glob", "/boot/init*"), - ("glob", "/boot/system*"), - ] - - -def private_key_filter(path: fsutil.TargetPath) -> bool: - if path.is_file() and not path.is_symlink(): - with path.open("rt") as file: - return "PRIVATE KEY" in file.readline() - - -@register_module("--home") -class Home(Module): - SPEC = [ - # Catches most shell related configuration files - ("glob", ".*[akz]sh*", from_user_home), - ("glob", "*/.*[akz]sh*", from_user_home), - # Added to catch any shell related configuration file not caught with the above glob - ("glob", ".*history", from_user_home), - ("glob", "*/.*history", from_user_home), - ("glob", ".*rc", from_user_home), - ("glob", "*/.*rc", from_user_home), - ("glob", ".*_logout", from_user_home), - ("glob", "*/.*_logout", from_user_home), - # Miscellaneous configuration files - ("dir", ".config", from_user_home), - ("glob", "*/.config", from_user_home), - ("file", ".wget-hsts", from_user_home), - ("glob", "*/.wget-hsts", from_user_home), - ("file", ".gitconfig", from_user_home), - ("glob", "*/.gitconfig", from_user_home), - ("file", ".selected_editor", 
from_user_home), - ("glob", "*/.selected_editor", from_user_home), - ("file", ".viminfo", from_user_home), - ("glob", "*/.viminfo", from_user_home), - ("file", ".lesshist", from_user_home), - ("glob", "*/.lesshist", from_user_home), - ("file", ".profile", from_user_home), - ("glob", "*/.profile", from_user_home), - # OS-X home (aka /Users) - ("glob", ".bash_sessions/*", from_user_home), - ("glob", "Library/LaunchAgents/*", from_user_home), - ("glob", "Library/Logs/*", from_user_home), - ("glob", "Preferences/*", from_user_home), - ("glob", "Library/Preferences/*", from_user_home), - ] - - -@register_module("--ssh") -@module_arg("--private-keys", action=argparse.BooleanOptionalAction, help="Add any private keys") -class SSH(Module): - SPEC = [ - ("glob", ".ssh/*", from_user_home), - ("glob", "/etc/ssh/*"), - ("glob", "sysvol/ProgramData/ssh/*"), - ] - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - # Acquire SSH configuration in sshd directories - - filter = None if cli_args.private_keys else private_key_filter - - if filter: - log.info("Executing SSH without --private-keys, skipping private keys.") - - with collector.file_filter(filter): - super().run(target, cli_args, collector) - - -@register_module("--var") -class Var(Module): - SPEC = [ - # In OS-X /var is a symlink to /private/var. To prevent collecting - # duplicates, we only use the /var directory here. 
- ("dir", "/var/log"), - ("dir", "/var/spool/at"), - ("dir", "/var/spool/cron"), - ("dir", "/var/spool/anacron"), - ("dir", "/var/lib/dpkg/status"), - ("dir", "/var/lib/rpm"), - ("dir", "/var/db"), - ("dir", "/var/audit"), - ("dir", "/var/cron"), - ("dir", "/var/run"), - # some OS-X specific files - ("dir", "/private/var/at"), - ("dir", "/private/var/db/diagnostics"), - ("dir", "/private/var/db/uuidtext"), - ("file", "/private/var/vm/sleepimage"), - ("glob", "/private/var/vm/swapfile*"), - ("glob", "/private/var/folders/*/*/0/com.apple.notificationcenter/*/*"), - # user specific cron on OS-X - ("dir", "/usr/lib/cron"), - ] - - -@register_module("--bsd") -class BSD(Module): - SPEC = [ - ("file", "/bin/freebsd-version"), - ("dir", "/usr/ports"), - ] - - -@register_module("--osx") -class OSX(Module): - DESC = "OS-X specific files and directories" - SPEC = [ - # filesystem events - ("dir", "/.fseventsd"), - # kernel extensions - ("dir", "/Library/Extensions"), - ("dir", "/System/Library/Extensions"), - # logs - ("dir", "/Library/Logs"), - # autorun locations - ("dir", "/Library/LaunchAgents"), - ("dir", "/Library/LaunchDaemons"), - ("dir", "/Library/StartupItems"), - ("dir", "/System/Library/LaunchAgents"), - ("dir", "/System/Library/LaunchDaemons"), - ("dir", "/System/Library/StartupItems"), - # installed software - ("dir", "/Library/Receipts/InstallHistory.plist"), - ("file", "/System/Library/CoreServices/SystemVersion.plist"), - # system preferences - ("dir", "/Library/Preferences"), - # DHCP settings - ("dir", "/private/var/db/dhcpclient/leases"), - ] - - -@register_module("--osx-applications-info") -class OSXApplicationsInfo(Module): - DESC = "OS-X info.plist from all installed applications" - SPEC = [ - ("glob", "/Applications/*/Contents/Info.plist"), - ("glob", "Applications/*/Contents/Info.plist", from_user_home), - ] - - -@register_module("--bootbanks") -class Bootbanks(Module): - DESC = "ESXi bootbanks" - - @classmethod - def _run(cls, target: Target, 
cli_args: argparse.Namespace, collector: Collector) -> None: - # Both ESXi 6 and 7 compatible - boot_dirs = { - "boot": "BOOT", - "bootbank": "BOOTBANK1", - "altbootbank": "BOOTBANK2", - } - boot_fs = {} - - for boot_dir, boot_vol in boot_dirs.items(): - dir_path = target.fs.path(boot_dir) - if dir_path.is_symlink() and dir_path.exists(): - dst = dir_path.readlink() - fs = dst.get().top.fs - boot_fs[fs] = boot_vol - - for fs, mountpoint, uuid, _ in iter_esxi_filesystems(target): - if fs in boot_fs: - name = boot_fs[fs] - log.info("Acquiring %s (%s)", mountpoint, name) - mountpoint_len = len(mountpoint) - base = f"fs/{uuid}:{name}" - for path in target.fs.path(mountpoint).rglob("*"): - outpath = path.as_posix()[mountpoint_len:] - collector.collect_path(path, outpath=outpath, base=base) - - -@register_module("--esxi") -class ESXi(Module): - DESC = "ESXi interesting files" - SPEC = [ - ("dir", "/scratch/log"), - ("dir", "/locker/packages/var"), - # ESXi 7 - ("dir", "/scratch/cache"), - ("dir", "/scratch/vmkdump"), - # ESXi 6 - ("dir", "/scratch/vmware"), - ] - - -@register_module("--vmfs") -class VMFS(Module): - DESC = "ESXi VMFS metadata files" - - @classmethod - def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - for fs, mountpoint, uuid, name in iter_esxi_filesystems(target): - if not fs.__type__ == "vmfs": - continue - - log.info("Acquiring %s (%s)", mountpoint, name) - mountpoint_len = len(mountpoint) - base = f"fs/{uuid}:{name}" - for path in target.fs.path(mountpoint).glob("*.sf"): - outpath = path.as_posix()[mountpoint_len:] - collector.collect_path(path, outpath=outpath, base=base) - - -@register_module("--activities-cache") -class ActivitiesCache(Module): - DESC = "user's activities caches" - SPEC = [ - ("dir", "AppData/Local/ConnectedDevicesPlatform", from_user_home), - ] - - -@register_module("--hashes") -@module_arg( - "--hash-func", - action="append", - type=HashFunc, - choices=[h.value for h in HashFunc], - 
help="Hash function to use", -) -@module_arg("--dir-to-hash", action="append", help="Hash only files in a provided directory") -@module_arg("--ext-to-hash", action="append", help="Hash only files with the extensions provided") -@module_arg("--glob-to-hash", action="append", help="Hash only files that match provided glob") -class FileHashes(Module): - DESC = "file hashes" - - DEFAULT_HASH_FUNCS = (HashFunc.MD5, HashFunc.SHA1, HashFunc.SHA256) - DEFAULT_EXTENSIONS = ( - "bat", - "cmd", - "com", - "dll", - "exe", - "installlog", - "installutil", - "js", - "lnk", - "ps1", - "sys", - "tlb", - "vbs", - ) - DEFAULT_PATHS = ("sysvol/Windows/",) - - MAX_FILE_SIZE_BYTES = 100 * 1024 * 1024 # 100MB - - DEFAULT_FILE_FILTERS = ( - functools.partial(filter_out_by_path_match, re_pattern="^/(sysvol/)?Windows/WinSxS/"), - functools.partial(filter_out_huge_files, max_size_bytes=MAX_FILE_SIZE_BYTES), - functools.partial(filter_out_by_value_match, value=b"MZ", offsets=[0, 3]), - ) - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - log.info("*** Acquiring file hashes") - - specs = cls.get_specs(cli_args) - - with collector.bind_module(cls): - start = time.time() - - path_hashes = collect_hashes(target, specs, path_filters=cls.DEFAULT_FILE_FILTERS) - rows_count, csv_compressed_bytes = serialize_into_csv(path_hashes, compress=True) - - collector.write_bytes( - f"{collector.base}/{collector.METADATA_BASE}/file-hashes.csv.gz", - csv_compressed_bytes, - ) - log.info("Hashing is done, %s files processed in %.2f secs", rows_count, (time.time() - start)) - - @classmethod - def get_specs(cls, cli_args: argparse.Namespace) -> Iterator[tuple]: - path_selectors = [] - - if cli_args.ext_to_hash: - extensions = cli_args.ext_to_hash - else: - extensions = cls.DEFAULT_EXTENSIONS - - if cli_args.dir_to_hash or cli_args.glob_to_hash: - if cli_args.glob_to_hash: - path_selectors.extend([("glob", glob) for glob in cli_args.glob_to_hash]) - - if 
cli_args.dir_to_hash: - path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cli_args.dir_to_hash]) - - else: - path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cls.DEFAULT_PATHS]) - - if cli_args.hash_func: - hash_funcs = cli_args.hash_func - else: - hash_funcs = cls.DEFAULT_HASH_FUNCS - - return [(path_selector, hash_funcs) for path_selector in path_selectors] - - -@register_module("--handles") -@module_arg( - "--handle-types", - action="extend", - help="Collect only specified handle types", - type=NamedObjectType, - choices=[h.value for h in NamedObjectType], - nargs="*", -) -@local_module -class OpenHandles(Module): - DESC = "Open handles" - - @classmethod - def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: - if not sys.platform == "win32": - log.error("Open Handles plugin can only run on Windows systems! Skipping...") - return - - from acquire.dynamic.windows.collect import collect_open_handles - from acquire.dynamic.windows.handles import serialize_handles_into_csv - - log.info("*** Acquiring open handles") - - handle_types = cli_args.handle_types - - with collector.bind_module(cls): - handles = collect_open_handles(handle_types) - csv_compressed_handles = serialize_handles_into_csv(handles) - - collector.write_bytes( - f"{collector.base}/{collector.METADATA_BASE}/open_handles.csv.gz", - csv_compressed_handles, - ) - log.info("Collecting open handles is done.") - - -def print_disks_overview(target: Target) -> None: - log.info("// Disks") - try: - for disk in target.disks: - log.info("%s", disk) - if not disk.vs: - continue - - for volume in disk.vs.volumes: - log.info("- %s", volume) - except Exception: - log.error("Failed to iterate disks") - log.info("") - - -def print_volumes_overview(target: Target) -> None: - log.info("// Volumes") - try: - for volume in target.volumes: - log.info("%s", volume) - except Exception: - log.error("Failed to iterate volumes") - log.info("") - - 
-def print_acquire_warning(target: Target) -> None: - if target.os != "windows": - log.warning("========================================== WARNING ==========================================") - log.warning("") - log.warning( - "The support for operating system '%s' is experimental. Some artifacts may not yet be included and some ", - target.os, - ) - log.warning("features may not work as expected. Please notify upstream for any missing artifacts or features.") - log.warning("") - log.warning("========================================== WARNING ==========================================") - - -def _add_modules_for_profile(choice: str, operating_system: str, profile: dict, msg: str) -> Optional[dict]: - modules_selected = dict() - - if choice and choice != "none": - profile_dict = profile[choice] - if operating_system not in profile_dict: - log.error(msg, operating_system, choice) - return None - - for mod in profile_dict[operating_system]: - modules_selected[mod.__modname__] = mod - - return modules_selected - - -def acquire_target(target: Target, args: argparse.Namespace, output_ts: Optional[str] = None) -> list[str | Path]: - acquire_gui = GUI() - files = [] - output_ts = output_ts or get_utc_now_str() - if args.log_to_dir: - log_file = args.log_path.joinpath(format_output_name("Unknown", output_ts, "log")) - # This will also rename the log file on disk, which was opened in main(), if the name is different - reconfigure_log_file(log, log_file, delay=True) - else: - log_file = args.log_path - - skip_list = set() - if log_file: - files.append(log_file) - if target.path.name == "local": - skip_list.add(normalize_path(target, log_file, resolve_parents=True, preserve_case=False)) - - print_disks_overview(target) - print_volumes_overview(target) - - if not target._os_plugin: - log.error("Error: Unable to detect OS") - return files - - hostname = "" - try: - hostname = target.hostname - except Exception: - log.exception("Failed to get hostname") - - version = None - try: 
- version = target.version - except Exception: - log.exception("Failed to detect OS version") - - if version is None: - os_plugin_name = target._os_plugin.__name__.lower() - version = f"{target.os} ({os_plugin_name})" - - log.info("Target name: %s", target.name) - log.info("Hostname: %s", hostname) - log.info("OS: %s", version) - log.info("") - - print_acquire_warning(target) - - modules_selected = {} - modules_successful = [] - modules_failed = {} - for name, mod in MODULES.items(): - name_slug = name.lower() - # check if module was set in the arguments provided - if getattr(args, name_slug): - modules_selected[name] = mod - - profile = args.profile - - # Set profile to default if no profile, modules, files, directories or globes were selected - if not profile and not modules_selected and not args.file and not args.directory and not args.glob: - log.info("Using default collection profile") - profile = "default" - log.info("") - - profile_modules = _add_modules_for_profile( - profile, target.os, PROFILES, "No collection set for OS %s with profile %s" - ) - - if not (volatile_profile := args.volatile_profile): - volatile_profile = "none" - - volatile_modules = _add_modules_for_profile( - volatile_profile, target.os, VOLATILE, "No collection set for OS %s with volatile profile %s" - ) - - if (profile_modules or volatile_modules) is None: - return files - - modules_selected.update(profile_modules) - modules_selected.update(volatile_modules) - - log.info("Modules selected: %s", ", ".join(sorted(modules_selected))) - - local_only_modules = {name: module for name, module in modules_selected.items() if hasattr(module, "__local__")} - if target.path.name != "local" and local_only_modules: - for name, module in local_only_modules.items(): - modules_failed[module.__name__] = "Not running on a local target" - log.error( - "Can not use local-only modules with non-local targets. 
Skipping: %s", - " ".join(sorted(local_only_modules.keys())), - ) - log.info("") - # Remove local-only modules from the modules list - modules_selected = dict(modules_selected.items() - local_only_modules.items()) - - log_file_handler = get_file_handler(log) - # Prepare log file and output file names - if log_file_handler and args.log_to_dir: - log_file = format_output_name(target.name, output_ts, "log") - # This will also rename the log file on disk, which was opened and written previously. - log_file_handler.set_filename(log_file) - log_path = Path(log_file_handler.baseFilename).resolve() - log.info("Logging to file %s", log_path) - files = [log_file_handler.baseFilename] - if target.path.name == "local": - skip_list = {normalize_path(target, log_path, resolve_parents=True, preserve_case=False)} - - output_path = args.output or args.output_file - if output_path.is_dir(): - output_dir = format_output_name(target.name, output_ts) - output_path = output_path.joinpath(output_dir) - output_path = output_path.resolve() - - output = OUTPUTS[args.output_type]( - output_path, - compress=args.compress, - compression_method=args.compress_method, - encrypt=args.encrypt, - public_key=args.public_key, - ) - files.append(output.path) - if target.path.name == "local": - skip_list.add(normalize_path(target, output.path, resolve_parents=True, preserve_case=False)) - - log.info("Writing output to %s", output.path) - if skip_list: - log.info("Skipping own files: %s", ", ".join(skip_list)) - log.info("") - - dir_base = "fs" - if target.os != "windows": - dir_base = "fs/$rootfs$" - - with Collector(target, output, base=dir_base, skip_list=skip_list) as collector: - # Acquire specified files - if args.file or args.directory or args.glob: - log.info("*** Acquiring specified paths") - spec = [] - - if args.file: - for path in args.file: - spec.append(("file", path.strip())) - - if args.directory: - for path in args.directory: - spec.append(("dir", path.strip())) - - if args.glob: - for 
path in args.glob: - spec.append(("glob", path.strip())) - - collector.collect(spec, module_name=CLI_ARGS_MODULE) - modules_successful.append(CLI_ARGS_MODULE) - log.info("") - - # Run modules (sort first based on execution order) - modules_selected = sorted(modules_selected.items(), key=lambda module: module[1].EXEC_ORDER) - count = 0 - for name, mod in modules_selected: - try: - mod.run(target, args, collector) - - modules_successful.append(mod.__name__) - except Exception: - log.error("Error while running module %s", name, exc_info=True) - modules_failed[mod.__name__] = get_formatted_exception() - - acquire_gui.progress = (acquire_gui.shard // len(modules_selected)) * count - count += 1 - - log.info("") - - collection_report = collector.report - - log.info("Done collecting artifacts:") - - # prepare and render full report only if logging level is more permissive than INFO - if log.level < logging.INFO: - log.debug(get_full_formatted_report(collection_report)) - - log.info(get_report_summary(collection_report)) - - if not args.disable_report: - collection_report_serialized = collection_report.get_records_per_module_per_outcome(serialize_records=True) - - execution_report = { - "target": str(target), - "name": target.name, - "timestamp": get_utc_now().isoformat(), - "modules-successful": modules_successful, - "modules-failed": modules_failed, - **collection_report_serialized, - } - - if args.output: - report_file_name = format_output_name(target.name, postfix=output_ts, ext="report.json") - else: - report_file_name = f"{output_path.name}.report.json" - - report_file_path = output_path.parent / report_file_name - persist_execution_report(report_file_path, execution_report) - - files.append(report_file_path) - log.info("Acquisition report for %s is written to %s", target, report_file_path) - - log.info("Output: %s", output.path) - return files - - -def upload_files(paths: list[str | Path], upload_plugin: UploaderPlugin, no_proxy: bool = False) -> None: - proxies = 
None if no_proxy else urllib.request.getproxies() - log.debug("Proxies: %s (no_proxy = %s)", proxies, no_proxy) - - log.info('Uploading files: "%s"', " ".join(map(str, paths))) - try: - upload_files_using_uploader(upload_plugin, paths, proxies) - except Exception: - log.error('Upload FAILED for files: "%s". See log file for details.', " ".join(map(str, paths))) - raise - else: - log.info("Upload succeeded.") - - -class WindowsProfile: - MINIMAL = [ - NTFS, - EventLogs, - Registry, - Tasks, - PowerShell, - Prefetch, - Appcompat, - PCA, - Misc, - Startup, - ] - DEFAULT = [ - *MINIMAL, - ETL, - Recents, - RecycleBin, - Drivers, - Syscache, - WBEM, - AV, - BITS, - DHCP, - DNS, - ActiveDirectory, - RemoteAccess, - ActivitiesCache, - ] - FULL = [ - *DEFAULT, - History, - NTDS, - QuarantinedFiles, - WindowsNotifications, - SSH, - IIS, - TextEditor, - ] - - -class LinuxProfile: - MINIMAL = [ - Etc, - Boot, - Home, - SSH, - Var, - ] - DEFAULT = MINIMAL - FULL = [ - *DEFAULT, - History, - WebHosting, - ] - - -class BsdProfile: - MINIMAL = [ - Etc, - Boot, - Home, - SSH, - Var, - BSD, - ] - DEFAULT = MINIMAL - FULL = MINIMAL - - -class ESXiProfile: - MINIMAL = [ - Bootbanks, - ESXi, - SSH, - ] - DEFAULT = [ - *MINIMAL, - VMFS, - ] - FULL = DEFAULT - - -class OSXProfile: - MINIMAL = [ - Etc, - Home, - Var, - OSX, - OSXApplicationsInfo, - ] - DEFAULT = MINIMAL - FULL = [ - *DEFAULT, - History, - SSH, - ] - - -PROFILES = { - "full": { - "windows": WindowsProfile.FULL, - "linux": LinuxProfile.FULL, - "bsd": BsdProfile.FULL, - "esxi": ESXiProfile.FULL, - "osx": OSXProfile.FULL, - }, - "default": { - "windows": WindowsProfile.DEFAULT, - "linux": LinuxProfile.DEFAULT, - "bsd": BsdProfile.DEFAULT, - "esxi": ESXiProfile.DEFAULT, - "osx": OSXProfile.DEFAULT, - }, - "minimal": { - "windows": WindowsProfile.MINIMAL, - "linux": LinuxProfile.MINIMAL, - "bsd": BsdProfile.MINIMAL, - "esxi": ESXiProfile.MINIMAL, - "osx": OSXProfile.MINIMAL, - }, - "none": None, -} - - -class VolatileProfile: 
- DEFAULT = [ - Netstat, - WinProcesses, - WinProcEnv, - WinArpCache, - WinRDPSessions, - WinDnsClientCache, - ] - EXTENSIVE = [ - Proc, - Sys, - ] - - -VOLATILE = { - "default": { - "windows": VolatileProfile.DEFAULT, - "linux": [], - "bsd": [], - "esxi": [], - "osx": [], - }, - "extensive": { - "windows": VolatileProfile.DEFAULT, - "linux": VolatileProfile.EXTENSIVE, - "bsd": VolatileProfile.EXTENSIVE, - "esxi": VolatileProfile.EXTENSIVE, - "osx": [], - }, - "none": None, -} - - -def exit_success(default_args: list[str]): - log.info("Acquire finished successful") - log.info("Arguments: %s", " ".join(sys.argv[1:])) - log.info("Default Arguments: %s", " ".join(default_args)) - log.info("Exiting with status code 0 (SUCCESS)") - sys.exit(0) - - -def exit_failure(default_args: list[str]): - log.error("Acquire FAILED") - log.error("Arguments: %s", " ".join(sys.argv[1:])) - log.error("Default Arguments: %s", " ".join(default_args)) - log.error("Exiting with status code 1 (FAILURE)") - sys.exit(1) - - -def main() -> None: - parser = create_argument_parser(PROFILES, VOLATILE, MODULES) - args, rest = parse_acquire_args(parser, config=CONFIG) - - # Since output has a default value, set it to None when output_file is defined - if args.output_file: - args.output = None - - try: - check_and_set_log_args(args) - except ValueError as err: - parser.exit(err) - - if args.log_to_dir: - # When args.upload files are specified, only these files are uploaded - # and no other action is done. 
Thus a log file specifically named - # Upload_.log is created - file_prefix = "Upload" if args.upload else "Unknown" - log_file = args.log_path.joinpath(format_output_name(file_prefix, args.start_time, "log")) - else: - log_file = args.log_path - - setup_logging(log, log_file, args.verbose, delay=args.log_delay) - - acquire_successful = True - files_to_upload = [log_file] - acquire_gui = None - try: - log.info(ACQUIRE_BANNER) - log.info("User: %s | Admin: %s", get_user_name(), is_user_admin()) - log.info("Arguments: %s", " ".join(sys.argv[1:])) - log.info("Default Arguments: %s", " ".join(args.config.get("arguments"))) - log.info("") - - # start GUI if requested through CLI / config - flavour = None - if args.gui == "always" or ( - args.gui == "depends" and os.environ.get("PYS_KEYSOURCE") == "prompt" and len(sys.argv) == 1 - ): - flavour = platform.system() - acquire_gui = GUI(flavour=flavour, upload_available=args.auto_upload) - - args.output, args.auto_upload, cancel = acquire_gui.wait_for_start(args) - if cancel: - log.info("Acquire cancelled") - exit_success(args.config.get("arguments")) - # From here onwards, the GUI will be locked and cannot be closed because we're acquiring - - plugins_to_load = [("cloud", MinIO)] - upload_plugins = UploaderRegistry("acquire.plugins", plugins_to_load) - - check_and_set_acquire_args(args, upload_plugins) - - if args.upload: - try: - upload_files(args.upload, args.upload_plugin, args.no_proxy) - except Exception as err: - acquire_gui.message("Failed to upload files") - log.exception(err) - exit_failure(args.config.get("arguments")) - exit_success(args.config.get("arguments")) - - target_paths = [] - for target_path in args.targets: - target_path = args_to_uri([target_path], args.loader, rest)[0] if args.loader else target_path - if target_path == "local": - target_query = {} - if args.force_fallback: - target_query.update({"force-directory-fs": 1}) - - if args.fallback: - target_query.update({"fallback-to-directory-fs": 1}) - 
- target_query = urllib.parse.urlencode(target_query) - target_path = f"{target_path}?{target_query}" - target_paths.append(target_path) - - try: - target_name = "Unknown" # just in case open_all already fails - for target in Target.open_all(target_paths): - target_name = "Unknown" # overwrite previous target name - target_name = target.name - log.info("Loading target %s", target_name) - log.info(target) - if target.os == "esxi" and target.name == "local": - # Loader found that we are running on an esxi host - # Perform operations to "enhance" memory - with esxi_memory_context_manager(): - files_to_upload = acquire_children_and_targets(target, args) - else: - files_to_upload = acquire_children_and_targets(target, args) - except Exception: - log.error("Failed to acquire target: %s", target_name) - if not is_user_admin(): - log.error("Try re-running as administrator/root") - acquire_gui.message("This application must be run as administrator.") - raise - - files_to_upload = sort_files(files_to_upload) - - except Exception as err: - log.error("Acquiring artifacts FAILED") - log.exception(err) - acquire_successful = False - else: - log.info("Acquiring artifacts succeeded") - - try: - # The auto-upload of files is done at the very very end to make sure any - # logged exceptions are written to the log file before uploading. - # This means that any failures from this point on will not be part of the - # uploaded log files, they will be written to the logfile on disk though. 
- if args.auto_upload and args.upload_plugin and files_to_upload: - try: - log_file_handler = get_file_handler(log) - if log_file_handler: - log_file_handler.close() - - upload_files(files_to_upload, args.upload_plugin) - except Exception: - if acquire_gui: - acquire_gui.message("Failed to upload files") - raise - - if acquire_gui: - acquire_gui.finish() - acquire_gui.wait_for_quit() - - except Exception as err: - acquire_successful = False - log.exception(err) - - if acquire_successful: - exit_success(args.config.get("arguments")) - else: - exit_failure(args.config.get("arguments")) - - -def load_child(target: Target, child_path: Path) -> None: - log.info("") - log.info("Loading child target %s", child_path) - try: - child = target.open_child(child_path) - log.info(target) - except Exception: - log.exception("Failed to load child target") - raise - - return child - - -def acquire_children_and_targets(target: Target, args: argparse.Namespace) -> list[str | Path]: - if args.child: - target = load_child(target, args.child) - - log.info("") - - files = [] - acquire_gui = GUI() - - counter = 0 - progress_limit = 50 if args.auto_upload else 90 - total_targets = 0 - if args.children: - total_targets += len(list(target.list_children())) - - if (args.children and not args.skip_parent) or not args.children: - total_targets += 1 - counter += 1 - acquire_gui.shard = int((progress_limit / total_targets) * counter) - try: - files.extend(acquire_target(target, args, args.start_time)) - - except Exception: - log.error("Failed to acquire main target") - acquire_gui.message("Failed to acquire target") - acquire_gui.wait_for_quit() - raise - - if args.children: - for child in target.list_children(): - counter += 1 - acquire_gui.shard = int((progress_limit / total_targets) * counter) - try: - child_target = load_child(target, child.path) - except Exception: - continue - - log.info("") - - try: - child_files = acquire_target(child_target, args) - files.extend(child_files) - except 
Exception: - log.exception("Failed to acquire child target %s", child_target.name) - acquire_gui.message("Failed to acquire child target") - continue - - return files - - -def sort_files(files: list[Union[str, Path]]) -> list[Path]: - log_files: list[Path] = [] - tar_paths: list[Path] = [] - report_paths: list[Path] = [] - - suffix_map = {".log": log_files, ".json": report_paths} - - for file in files: - if isinstance(file, str): - file = Path(file) - - suffix_map.get(file.suffix, tar_paths).append(file) - - # Reverse log paths, as the first one in ``files`` is the main one. - log_files.reverse() - - return tar_paths + report_paths + log_files - - -if __name__ == "__main__": - try: - main() - except KeyboardInterrupt: - sys.exit(1) - except Exception: - sys.exit(1) +from __future__ import annotations + +import argparse +import enum +import functools +import io +import itertools +import logging +import os +import platform +import shutil +import subprocess +import sys +import time +import urllib.parse +import urllib.request +from collections import defaultdict, namedtuple +from itertools import product +from pathlib import Path +from typing import BinaryIO, Callable, Iterator, Optional, Union + +from dissect.target import Target +from dissect.target.filesystem import Filesystem +from dissect.target.filesystems import ntfs +from dissect.target.helpers import fsutil +from dissect.target.plugins.apps.webserver import iis +from dissect.target.plugins.os.windows.log import evt, evtx +from dissect.target.tools.utils import args_to_uri +from dissect.util.stream import RunlistStream + +from acquire.collector import Collector, get_full_formatted_report, get_report_summary +from acquire.dynamic.windows.named_objects import NamedObjectType +from acquire.dynamic.windows.arp import ( + NetAdapter, + get_windows_network_adapters, + get_windows_net_neighbors, + format_net_neighbors_list +) +from acquire.dynamic.windows.netstat import ( + NetConnection, + get_active_connections, + 
format_net_connections_list +) +from acquire.esxi import esxi_memory_context_manager +from acquire.gui import GUI +from acquire.hashes import ( + HashFunc, + collect_hashes, + filter_out_by_path_match, + filter_out_by_value_match, + filter_out_huge_files, + serialize_into_csv, +) +from acquire.log import get_file_handler, reconfigure_log_file, setup_logging +from acquire.outputs import OUTPUTS +from acquire.uploaders.minio import MinIO +from acquire.uploaders.plugin import UploaderPlugin, upload_files_using_uploader +from acquire.uploaders.plugin_registry import UploaderRegistry +from acquire.utils import ( + check_and_set_acquire_args, + check_and_set_log_args, + create_argument_parser, + format_output_name, + get_formatted_exception, + get_user_name, + get_utc_now, + get_utc_now_str, + is_user_admin, + normalize_path, + parse_acquire_args, + persist_execution_report, +) + +try: + from acquire.version import version +except ImportError: + version = "0.0.dev" + +try: + # Injected by pystandalone builder + from acquire.config import CONFIG +except ImportError: + CONFIG = defaultdict(lambda: None) + + +VERSION = version +ACQUIRE_BANNER = r""" + _ + __ _ ___ __ _ _ _(_)_ __ ___ + / _` |/ __/ _` | | | | | '__/ _ \ +| (_| | (_| (_| | |_| | | | | __/ + \__,_|\___\__, |\__,_|_|_| \___| + by Fox-IT |_| v{} + part of NCC Group +""".format( + VERSION +)[ + 1: +] + +MODULES = {} +MODULE_LOOKUP = {} + +CLI_ARGS_MODULE = "cli-args" + +log = logging.getLogger("acquire") +log.propagate = 0 +log_file_handler = None +logging.lastResort = None +logging.raiseExceptions = False + + +def misc_windows_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + misc_dirs = { + ("Windows/ServiceProfiles/LocalService", False), + ("Windows/ServiceProfiles/NetworkService", False), + ("Windows/System32/config/systemprofile", False), + ("Users", True), + ("Documents and Settings", True), + } + + for fs in target.fs.path().iterdir(): + if fs.name.lower() == "c:": + continue + + for misc_dir, 
get_subdirs in misc_dirs: + misc_path = fs.joinpath(misc_dir) + + if not misc_path.exists(): + continue + + if get_subdirs: + for entry in misc_path.iterdir(): + if entry.is_dir(): + yield entry + else: + yield misc_path + + +def misc_unix_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + user_dirs = ["root", "home/*"] + + home_dirs = (target.fs.path("/").glob(path) for path in user_dirs) + for home_dir in itertools.chain.from_iterable(home_dirs): + yield home_dir + + +def misc_osx_user_homes(target: Target) -> Iterator[fsutil.TargetPath]: + for homedir in itertools.chain(target.fs.path("/Users/").glob("*"), misc_unix_user_homes(target)): + yield homedir + + +MISC_MAPPING = { + "osx": misc_osx_user_homes, + "windows": misc_windows_user_homes, +} + + +def from_user_home(target: Target, path: str) -> Iterator[str]: + try: + for user_details in target.user_details.all_with_home(): + yield user_details.home_path.joinpath(path).as_posix() + except Exception as e: + log.warning("Error occurred when requesting all user homes") + log.debug("", exc_info=e) + + misc_user_homes = MISC_MAPPING.get(target.os, misc_unix_user_homes) + for user_dir in misc_user_homes(target): + yield user_dir.joinpath(path).as_posix() + + +def iter_ntfs_filesystems(target: Target) -> Iterator[tuple[ntfs.NtfsFilesystem, Optional[str], str, str]]: + mount_lookup = defaultdict(list) + for mount, fs in target.fs.mounts.items(): + mount_lookup[fs].append(mount) + + for fs in target.filesystems: + # The attr check is needed to correctly collect fake NTFS filesystems + # where the MFT etc. are added to a VirtualFilesystem. This happens for + # instance when the target is an acquired tar target. 
+ if not isinstance(fs, ntfs.NtfsFilesystem) and not hasattr(fs, "ntfs"): + log.warning("Skipping %s - not an NTFS filesystem", fs) + continue + + if fs in mount_lookup: + mountpoints = mount_lookup[fs] + + for main_mountpoint in mountpoints: + if main_mountpoint != "sysvol": + break + + name = main_mountpoint + mountpoints = ", ".join(mountpoints) + else: + main_mountpoint = None + name = f"vol-{fs.ntfs.serial:x}" + mountpoints = "No mounts" + log.warning("Unmounted NTFS filesystem found %s (%s)", fs, name) + + yield fs, main_mountpoint, name, mountpoints + + +def iter_esxi_filesystems(target: Target) -> Iterator[tuple[Filesystem, str, str, Optional[str]]]: + for mount, fs in target.fs.mounts.items(): + if not mount.startswith("/vmfs/volumes/"): + continue + + uuid = mount[len("/vmfs/volumes/") :] # strip /vmfs/volumes/ + name = None + if fs.__type__ == "fat": + name = fs.volume.name + elif fs.__type__ == "vmfs": + name = fs.vmfs.label + + yield fs, mount, uuid, name + + +def register_module(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: + def wrapper(module_cls: type[Module]) -> type[Module]: + name = module_cls.__name__ + + if name in MODULES: + raise ValueError( + f"Module name is already registered: registration for {module_cls} conflicts with {MODULES[name]}" + ) + + desc = module_cls.DESC or name + kwargs["help"] = f"acquire {desc}" + kwargs["action"] = argparse.BooleanOptionalAction + kwargs["dest"] = name.lower() + module_cls.__modname__ = name + + if not hasattr(module_cls, "__cli_args__"): + module_cls.__cli_args__ = [] + module_cls.__cli_args__.append((args, kwargs)) + + MODULES[name] = module_cls + return module_cls + + return wrapper + + +def module_arg(*args, **kwargs) -> Callable[[type[Module]], type[Module]]: + def wrapper(module_cls: type[Module]) -> type[Module]: + if not hasattr(module_cls, "__cli_args__"): + module_cls.__cli_args__ = [] + module_cls.__cli_args__.append((args, kwargs)) + return module_cls + + return wrapper + + +def 
local_module(cls: type[object]) -> object:
    """A decorator that sets property `__local__` on a module class to mark it for local target only"""
    cls.__local__ = True
    return cls


class ExecutionOrder(enum.IntEnum):
    """Relative phase in which a registered module runs: TOP first, BOTTOM last."""

    TOP = 0
    DEFAULT = 1
    BOTTOM = 2


class Module:
    """Base class for acquisition modules.

    Subclasses declare a static ``SPEC`` of artifacts to collect and may
    override ``get_spec_additions`` (specs computed at runtime) and ``_run``
    (imperative collection logic).
    """

    # Human-readable description used in log output; falls back to the class name.
    DESC = None
    # Static list of (type, value[, path_function]) collection specs.
    SPEC = []
    EXEC_ORDER = ExecutionOrder.DEFAULT

    @classmethod
    def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        """Collect the static spec, any runtime spec additions, then module-specific logic."""
        desc = cls.DESC or cls.__name__.lower()
        log.info("*** Acquiring %s", desc)

        with collector.bind_module(cls):
            collector.collect(cls.SPEC)

            spec_ext = cls.get_spec_additions(target, cli_args)
            if spec_ext:
                collector.collect(list(spec_ext))

            cls._run(target, cli_args, collector)

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # Hook: subclasses return extra collection specs computed at runtime.
        pass

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        # Hook: subclasses implement imperative collection steps here.
        pass


@register_module("--sys")
@local_module
class Sys(Module):
    DESC = "Sysfs files (live systems only)"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        # /sys is virtual and volatile; do not follow symlinks to avoid loops.
        spec = [("dir", "/sys")]
        collector.collect(spec, follow=False, volatile=True)


@register_module("--proc")
@local_module
class Proc(Module):
    DESC = "Procfs files (live systems only)"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        # /proc is virtual and volatile; do not follow symlinks to avoid loops.
        spec = [("dir", "/proc")]
        collector.collect(spec, follow=False, volatile=True)


@register_module("-n", "--ntfs")
class NTFS(Module):
    DESC = "NTFS filesystem metadata"

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target):
            log.info("Acquiring from %s as %s (%s)", fs, name, mountpoints)

            for filename in ("$MFT", "$Boot", "$Secure:$SDS"):
                if main_mountpoint is not None:
                    path = fsutil.join(main_mountpoint, filename)
                    collector.collect_path(path)
                else:
                    # In case the NTFS filesystem is not mounted, which should not occur but
                    # iter_ntfs_filesystems allows for the possibility, we fall back to raw file
                    # collection.
                    collector.collect_file_raw(filename, fs, name)

            cls.collect_usnjrnl(collector, fs, name)

    @classmethod
    def collect_usnjrnl(cls, collector: Collector, fs: Filesystem, name: str) -> None:
        """Collect the $UsnJrnl:$J stream, skipping its leading sparse region."""

        def usnjrnl_accessor(journal: BinaryIO) -> tuple[BinaryIO, int]:
            # If the filesystem is a virtual NTFS filesystem, journal will be
            # plain BinaryIO, not a RunlistStream.
            if isinstance(journal, RunlistStream):
                # Seek past the sparse (unallocated) runs at the start of $J so only
                # the resident tail with real records is collected.
                i = 0
                while journal.runlist[i][0] is None:
                    journal.seek(journal.runlist[i][1] * journal.block_size, io.SEEK_CUR)
                    i += 1
                size = journal.size - journal.tell()
            else:
                size = journal.size

            return (journal, size)

        collector.collect_file_raw(
            "$Extend/$Usnjrnl:$J",
            fs,
            name,
            file_accessor=usnjrnl_accessor,
        )


@register_module("-r", "--registry")
class Registry(Module):
    DESC = "registry hives"
    HIVES = ["drivers", "sam", "security", "software", "system", "default"]
    SPEC = [
        ("dir", "sysvol/windows/system32/config/txr"),
        ("dir", "sysvol/windows/system32/config/regback"),
        ("glob", "sysvol/System Volume Information/_restore*/RP*/snapshot/_REGISTRY_*"),
        ("glob", "ntuser.dat*", from_user_home),
        ("glob", "AppData/Local/Microsoft/Windows/UsrClass.dat*", from_user_home),
        ("glob", "Local Settings/Application Data/Microsoft/Windows/UsrClass.dat*", from_user_home),
    ]

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # Glob all hives to include e.g. .LOG files and .regtrans-ms files.
        files = []
        for hive in cls.HIVES:
            pattern = "sysvol/windows/system32/config/{}*".format(hive)
            for entry in target.fs.path().glob(pattern):
                if entry.is_file():
                    files.append(("file", entry))
        return files


@register_module("--netstat")
@local_module
class Netstat(Module):
    DESC = "Windows network connections"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        # Enumerate live connections via the iphlpapi bindings instead of
        # shelling out to netstat.exe.
        net_connections: list[NetConnection] = get_active_connections()
        output = format_net_connections_list(net_connections)

        output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE
        full_output_path = fsutil.join(output_base, "netstat")

        collector.output.write_bytes(full_output_path, output.encode())
        # Reported as if the equivalent netstat command had been executed.
        collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"])


@register_module("--win-processes")
@local_module
class WinProcesses(Module):
    DESC = "Windows process list"
    SPEC = [
        ("command", (["tasklist", "/V", "/fo", "csv"], "win-processes")),
    ]
    EXEC_ORDER = ExecutionOrder.BOTTOM


@register_module("--win-proc-env")
@local_module
class WinProcEnv(Module):
    DESC = "Process environment variables"
    SPEC = [
        (
            "command",
            (
                ["PowerShell", "-command", "Get-Process | ForEach-Object {$_.StartInfo.EnvironmentVariables}"],
                "win-process-env-vars",
            ),
        ),
    ]
    EXEC_ORDER = ExecutionOrder.BOTTOM


@register_module("--win-arp-cache")
@local_module
class WinArpCache(Module):
    DESC = "ARP Cache"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        # Resolve adapters first so each neighbor entry can be tied to an interface.
        network_adapters: list[NetAdapter] = get_windows_network_adapters()
        neighbors = get_windows_net_neighbors(network_adapters)

        output = format_net_neighbors_list(neighbors)

        output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE
        full_output_path = fsutil.join(output_base, "arp-cache")

        collector.output.write_bytes(full_output_path, output.encode())
        collector.report.add_command_collected(cls.__name__, ["arp-cache"])


@register_module("--win-rdp-sessions")
@local_module
class WinRDPSessions(Module):
    DESC = "Windows Remote Desktop session information"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # where.exe instead of where, just in case the client runs in PS instead of CMD
        # by default where hides qwinsta on 32-bit systems because qwinsta is only 64-bit, but with recursive /R search
        # we can still manage to find it and by passing the exact path Windows will launch a 64-bit process
        # on systems capable of doing that.
        qwinsta = subprocess.run(
            ["where.exe", "/R", os.environ["WINDIR"], "qwinsta.exe"], capture_output=True, text=True
        ).stdout.split("\n")[0]
        return [
            ("command", ([qwinsta, "/VM"], "win-rdp-sessions")),
        ]


@register_module("--winpmem")
@local_module
class WinMemDump(Module):
    DESC = "Windows full memory dump"
    EXEC_ORDER = ExecutionOrder.BOTTOM

    @classmethod
    def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None:
        winpmem_file_name = "winpmem.exe"
        winpmem_exec = shutil.which(winpmem_file_name)

        # "-" makes winpmem write the memory image to stdout.
        command_parts = [winpmem_exec, "-"]

        if winpmem_exec is None:
            # Keep the human-readable program name in the reported command line.
            command_parts.pop(0)
            command_parts.insert(0, winpmem_file_name)
            collector.report.add_command_failed(cls.__name__, command_parts)
            log.error(
                "- Failed to collect output from command `%s`, program %s not found",
                " ".join(command_parts),
                winpmem_file_name,
            )
            return
        else:
            log.info("- Collecting output from command `%s`", " ".join(command_parts))

        # Stage the dump next to the output container, then copy it in afterwards.
        mem_dump_path = collector.output.path.with_name("winpmem")
        mem_dump_errors_path = mem_dump_path.with_name("winpmem.errors")

        output_base = collector.COMMAND_OUTPUT_BASE
        if collector.base:
            output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE)

        mem_dump_output_path = fsutil.join(output_base, mem_dump_path.name)
        mem_dump_errors_output_path = fsutil.join(output_base, mem_dump_errors_path.name)

        with mem_dump_path.open(mode="wb") as mem_dump_fh:
            with mem_dump_errors_path.open(mode="wb") as mem_dump_errors_fh:
                try:
                    # The shell parameter must be set to False, as otherwise the
                    # output from stdout is not piped into the filehandle.
                    # The check parameter must be set to False, as winpmem.exe
                    # always seems to exit with an error code, even on success.
                    subprocess.run(
                        bufsize=0,
                        args=command_parts,
                        stdout=mem_dump_fh,
                        stderr=mem_dump_errors_fh,
                        shell=False,
                        check=False,
                    )
                except Exception:
                    collector.report.add_command_failed(cls.__name__, command_parts)
                    log.error(
                        "- Failed to collect output from command `%s`",
                        " ".join(command_parts),
                        exc_info=True,
                    )
                    return

        collector.output.write_entry(mem_dump_output_path, mem_dump_path)
        collector.output.write_entry(mem_dump_errors_output_path, mem_dump_errors_path)
        collector.report.add_command_collected(cls.__name__, command_parts)
        # Remove the staging files once they are inside the output container.
        mem_dump_path.unlink()
        mem_dump_errors_path.unlink()


@register_module("--winmem-files")
class WinMemFiles(Module):
    DESC = "Windows memory files"
    SPEC = [
        ("file", "sysvol/pagefile.sys"),
        ("file", "sysvol/hiberfil.sys"),
        ("file", "sysvol/swapfile.sys"),
        ("file", "sysvol/windows/memory.dmp"),
        ("dir", "sysvol/windows/minidump"),
    ]

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # Page file and crash dump locations are configurable; read them from the registry.
        spec = set()

        page_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Memory Management"
        for reg_key in target.registry.iterkeys(page_key):
            for page_path in reg_key.value("ExistingPageFiles").value:
                spec.add(("file", target.resolve(page_path)))

        crash_key = "HKLM\\SYSTEM\\CurrentControlSet\\Control\\CrashControl"
        for reg_key in target.registry.iterkeys(crash_key):
            spec.add(("file", target.resolve(reg_key.value("DumpFile").value)))
            spec.add(("dir", target.resolve(reg_key.value("MinidumpDir").value)))

        return spec


@register_module("-e", "--eventlogs")
class EventLogs(Module):
    DESC = "event logs"

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # Use the dissect plugins to locate both legacy .evt and modern .evtx logs.
        spec = set()
        evt_log_paths = evt.EvtPlugin(target).get_logs(filename_glob="*.evt")
        for path in evt_log_paths:
            spec.add(("file", path))
        evtx_log_paths = evtx.EvtxPlugin(target).get_logs(filename_glob="*.evtx")
        for path in evtx_log_paths:
            spec.add(("file", path))
        return spec


@register_module("-t", "--tasks")
class Tasks(Module):
    # Scheduled task definitions and Task Scheduler service logs.
    SPEC = [
        ("dir", "sysvol/windows/tasks"),
        ("dir", "sysvol/windows/system32/tasks"),
        ("dir", "sysvol/windows/syswow64/tasks"),
        ("dir", "sysvol/windows/sysvol/domain/policies"),
        ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"),
        # Task Scheduler Service transaction log
        ("file", "sysvol/SchedLgU.txt"),
        ("file", "sysvol/windows/SchedLgU.txt"),
        ("file", "sysvol/windows/tasks/SchedLgU.txt"),
        ("file", "sysvol/winnt/tasks/SchedLgU.txt"),
    ]


@register_module("-ad", "--active-directory")
class ActiveDirectory(Module):
    DESC = "Active Directory data (policies, scripts, etc.)"
    SPEC = [
        ("dir", "sysvol/windows/sysvol/domain"),
    ]

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # The SYSVOL share location is configurable; read it from the Netlogon parameters.
        spec = set()
        key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters"
        for reg_key in target.registry.iterkeys(key):
            try:
                spec.add(("dir", reg_key.value("SysVol").value))
            except Exception:
                # Best-effort: a missing SysVol value simply yields no extra spec.
                pass
        return spec


@register_module("-nt", "--ntds")
class NTDS(Module):
    SPEC = [
        ("dir", "sysvol/windows/NTDS"),
    ]

    @classmethod
    def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]:
        # NTDS file locations are configurable; resolve them from the service parameters.
        spec = set()

        key = "HKLM\\SYSTEM\\CurrentControlSet\\services\\NTDS\\Parameters"
        values = [
            ("dir", "DSA Working Directory"),
            ("file", "DSA Database File"),
            ("file", "Database backup path"),
            ("dir", "Database log files path"),
        ]
        for reg_key in target.registry.iterkeys(key):
            for collect_type, value in values:
                path = reg_key.value(value).value
                spec.add((collect_type, path))

        return spec


@register_module("--etl")
class ETL(Module):
    DESC = "interesting ETL files"
    SPEC = [
        ("glob", "sysvol/Windows/System32/WDI/LogFiles/*.etl"),
    ]


@register_module("--recents")
class Recents(Module):
    DESC = "Windows recently used files artifacts"
    SPEC = [
        ("dir", "AppData/Roaming/Microsoft/Windows/Recent", from_user_home),
        ("dir", "AppData/Roaming/Microsoft/Office/Recent", from_user_home),
        ("glob", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/*.lnk", from_user_home),
        ("glob", "Desktop/*.lnk", from_user_home),
        ("glob", "Recent/*.lnk", from_user_home),
        ("glob", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/*.lnk"),
    ]


@register_module("--startup")
class Startup(Module):
    DESC = "Windows Startup folder"
    SPEC = [
        ("dir", "sysvol/ProgramData/Microsoft/Windows/Start Menu/Programs/Startup"),
        ("dir", "AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup", from_user_home),
    ]


def recyclebin_filter(path: fsutil.TargetPath) -> bool:
    # Collector file filter: True means "skip this file" (used for large $R data files).
    return bool(path.stat().st_size >= (10 * 1024 * 1024))  # 10MB


@register_module("--recyclebin")
@module_arg(
    "--large-files",
    action=argparse.BooleanOptionalAction,
    help="Collect files larger than 10MB in the Recycle Bin",
)
@module_arg(
    "--data-files",
    action=argparse.BooleanOptionalAction,
    help="Collect the data files in the Recycle Bin",
)
class RecycleBin(Module):
    DESC = "recycle bin metadata and data files"
@classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + large_files_filter = None if cli_args.large_files else recyclebin_filter + + if large_files_filter: + log.info("Skipping files in Recycle Bin that are larger than 10MB.") + + patterns = ["$Recycle.bin/*/$I*", "Recycler/*/INFO2", "Recycled/INFO2"] + + if cli_args.data_files is None or cli_args.data_files: + patterns.extend(["$Recycle.Bin/$R*", "$Recycle.Bin/*/$R*", "RECYCLE*/D*"]) + + with collector.file_filter(large_files_filter): + for fs, main_mountpoint, name, mountpoints in iter_ntfs_filesystems(target): + log.info("Acquiring recycle bin from %s as %s (%s)", fs, name, mountpoints) + + for pattern in patterns: + if main_mountpoint is not None: + pattern = fsutil.join(main_mountpoint, pattern) + collector.collect_glob(pattern) + else: + # In case the NTFS filesystem is not mounted, which should not occur but + # iter_ntfs_filesystems allows for the possibility, we fall back to raw file + # collection. 
+ for entry in fs.path().glob(pattern): + if entry.is_file(): + collector.collect_file_raw(fs, entry, name) + + +@register_module("--drivers") +class Drivers(Module): + DESC = "installed drivers" + SPEC = [ + ("glob", "sysvol/windows/system32/drivers/*.sys"), + ] + + +@register_module("--exchange") +class Exchange(Module): + DESC = "interesting Exchange configuration files" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + key = "HKLM\\SOFTWARE\\Microsoft\\ExchangeServer" + for reg_key in target.registry.iterkeys(key): + for subkey in reg_key.subkeys(): + try: + setup_key = subkey.subkey("Setup") + install_path = setup_key.value("MsiInstallPath").value + spec.update( + [ + ( + "file", + f"{install_path}\\TransportRoles\\Agents\\agents.config", + ), + ( + "dir", + f"{install_path}\\Logging\\Ews", + ), + ( + "dir", + f"{install_path}\\Logging\\CmdletInfra\\Powershell-Proxy\\Cmdlet", + ), + ( + "dir", + f"{install_path}\\TransportRoles\\Logs", + ), + ] + ) + except Exception: + pass + return spec + + +@register_module("--iis") +class IIS(Module): + DESC = "IIS logs" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set( + [ + ("glob", "sysvol\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\Windows.old\\Windows\\System32\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\inetpub\\logs\\LogFiles\\*.log"), + ("glob", "sysvol\\inetpub\\logs\\LogFiles\\W3SVC*\\*.log"), + ("glob", "sysvol\\Resources\\Directory\\*\\LogFiles\\Web\\W3SVC*\\*.log"), + ] + ) + iis_plugin = iis.IISLogsPlugin(target) + spec.update([("file", log_path) for _, log_path in iis_plugin.iter_log_format_path_pairs()]) + return spec + + +@register_module("--prefetch") +class Prefetch(Module): + DESC = "Windows Prefetch files" + SPEC = [ + ("dir", "sysvol/windows/prefetch"), + ] + + +@register_module("--appcompat") +class Appcompat(Module): 
+ DESC = "Windows Amcache and RecentFileCache" + SPEC = [ + ("dir", "sysvol/windows/appcompat"), + ] + + +@register_module("--pca") +class PCA(Module): + DESC = "Windows Program Compatibility Assistant" + SPEC = [ + ("dir", "sysvol/windows/pca"), + ] + + +@register_module("--syscache") +class Syscache(Module): + DESC = "Windows Syscache hive and log files" + SPEC = [ + ("file", "sysvol/System Volume Information/Syscache.hve"), + ("glob", "sysvol/System Volume Information/Syscache.hve.LOG*"), + ] + + +@register_module("--win-notifications") +class WindowsNotifications(Module): + DESC = "Windows Push Notifications Database files." + SPEC = [ + # Old Win7/Win10 version of the file + ("file", "AppData/Local/Microsoft/Windows/Notifications/appdb.dat", from_user_home), + # New version of the file + ("file", "AppData/Local/Microsoft/Windows/Notifications/wpndatabase.db", from_user_home), + ] + + +@register_module("--bits") +class BITS(Module): + DESC = "Background Intelligent Transfer Service (BITS) queue/log DB" + SPEC = [ + # Pre-Win10 the BITS DB files are called qmgr[01].dat, in Win10 it is + # called qmgr.db and its transaction logs edb.log and edb.log[0-2] + # Win 2000/XP/2003 path + # (basically: \%ALLUSERSPROFILE%\Application Data\Microsoft\...) 
+ ("glob", "sysvol/Documents and Settings/All Users/Application Data/Microsoft/Network/Downloader/qmgr*.dat"), + # Win Vista and higher path + # (basically: \%ALLUSERSPROFILE%\Microsoft\...; %ALLUSERSPROFILE% == %PROGRAMDATA%) + ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr*.dat"), + # Win 10 files + ("file", "sysvol/ProgramData/Microsoft/Network/Downloader/qmgr.db"), + ("glob", "sysvol/ProgramData/Microsoft/Network/Downloader/edb.log*"), + ] + + +@register_module("--wbem") +class WBEM(Module): + DESC = "Windows WBEM (WMI) database files" + SPEC = [ + ("dir", "sysvol/windows/system32/wbem/Repository"), + ] + + +@register_module("--dhcp") +class DHCP(Module): + DESC = "Windows Server DHCP files" + + @classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + key = "HKLM\\SYSTEM\\CurrentControlSet\\Services\\DhcpServer\\Parameters" + for reg_key in target.registry.iterkeys(key): + spec.add(("dir", reg_key.value("DatabasePath").value)) + return spec + + +@register_module("--dns") +class DNS(Module): + DESC = "Windows Server DNS files" + SPEC = [ + ("glob", "sysvol/windows/system32/config/netlogon.*"), + ("dir", "sysvol/windows/system32/dns"), + ] + + +@register_module("--win-dns-cache") +@local_module +class WinDnsClientCache(Module): + DESC = "The contents of Windows DNS client cache" + SPEC = [ + ( + "command", + # Powershell.exe understands a subcommand passed as single string parameter, + # no need to split the subcommand in parts. 
+ ( + ["powershell.exe", "-Command", "Get-DnsClientCache | ConvertTo-Csv -NoTypeInformation"], + "get-dnsclientcache", + ), + ), + ] + EXEC_ORDER = ExecutionOrder.BOTTOM + + +@register_module("--powershell") +class PowerShell(Module): + DESC = "Windows PowerShell Artefacts" + SPEC = [ + ("dir", "AppData/Roaming/Microsoft/Windows/PowerShell", from_user_home), + ] + + +@register_module("--thumbnail-cache") +class ThumbnailCache(Module): + DESC = "Windows thumbnail db artifacts" + SPEC = [ + ("glob", "AppData/Local/Microsoft/Windows/Explorer/thumbcache_*", from_user_home), + ] + + +@register_module("--text-editor") +class TextEditor(Module): + DESC = "text editor (un)saved tab contents" + # Only Windows 11 notepad & Notepad++ tabs for now, but locations for other text editors may be added later. + SPEC = [ + ("dir", "AppData/Local/Packages/Microsoft.WindowsNotepad_8wekyb3d8bbwe/LocalState/TabState/", from_user_home), + ("dir", "AppData/Roaming/Notepad++/backup/", from_user_home), + ] + + +@register_module("--misc") +class Misc(Module): + DESC = "miscellaneous Windows artefacts" + SPEC = [ + ("file", "sysvol/windows/PFRO.log"), + ("file", "sysvol/windows/setupapi.log"), + ("file", "sysvol/windows/setupapidev.log"), + ("glob", "sysvol/windows/inf/setupapi*.log"), + ("glob", "sysvol/system32/logfiles/*/*.txt"), + ("dir", "sysvol/windows/system32/sru"), + ("dir", "sysvol/windows/system32/drivers/etc"), + ("dir", "sysvol/Windows/System32/WDI/LogFiles/StartupInfo"), + ("dir", "sysvol/windows/system32/GroupPolicy/DataStore/"), + ("dir", "sysvol/ProgramData/Microsoft/Group Policy/History/"), + ("dir", "AppData/Local/Microsoft/Group Policy/History/", from_user_home), + ("glob", "sysvol/Windows/System32/LogFiles/SUM/*.mdb"), + ("glob", "sysvol/ProgramData/USOShared/Logs/System/*.etl"), + ("glob", "sysvol/Windows/Logs/WindowsUpdate/WindowsUpdate*.etl"), + ("glob", "sysvol/Windows/Logs/CBS/CBS*.log"), + ("dir", "sysvol/ProgramData/Microsoft/Search/Data/Applications/Windows"), + 
("dir", "sysvol/Windows/SoftwareDistribution/DataStore"), + ] + + +@register_module("--av") +class AV(Module): + DESC = "various antivirus logs" + SPEC = [ + # AVG + ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/log"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/AVG/Antivirus/report"), + ("dir", "sysvol/ProgramData/AVG/Antivirus/log"), + ("dir", "sysvol/ProgramData/AVG/Antivirus/report"), + # Avast + ("dir", "sysvol/Documents And Settings/All Users/Application Data/Avast Software/Avast/Log"), + ("dir", "sysvol/ProgramData/Avast Software/Avast/Log"), + ("dir", "Avast Software/Avast/Log", from_user_home), + ("file", "sysvol/ProgramData/Avast Software/Avast/Chest/index.xml"), + # Avira + ("dir", "sysvol/ProgramData/Avira/Antivirus/LOGFILES"), + ("dir", "sysvol/ProgramData/Avira/Security/Logs"), + ("dir", "sysvol/ProgramData/Avira/VPN"), + # Bitdefender + ("dir", "sysvol/ProgramData/Bitdefender/Endpoint Security/Logs"), + ("dir", "sysvol/ProgramData/Bitdefender/Desktop/Profiles/Logs"), + ("glob", "sysvol/Program Files*/Bitdefender*/*"), + # ComboFix + ("file", "sysvol/ComboFix.txt"), + # Cybereason + ("dir", "sysvol/ProgramData/crs1/Logs"), + ("dir", "sysvol/ProgramData/apv2/Logs"), + ("dir", "sysvol/ProgramData/crb1/Logs"), + # Cylance + ("dir", "sysvol/ProgramData/Cylance/Desktop"), + ("dir", "sysvol/ProgramData/Cylance/Optics/Log"), + ("dir", "sysvol/Program Files/Cylance/Desktop/log"), + # ESET + ("dir", "sysvol/Documents and Settings/All Users/Application Data/ESET/ESET NOD32 Antivirus/Logs"), + ("dir", "sysvol/ProgramData/ESET/ESET NOD32 Antivirus/Logs"), + ("dir", "sysvol/ProgramData/ESET/ESET Security/Logs"), + ("dir", "sysvol/ProgramData/ESET/RemoteAdministrator/Agent/EraAgentApplicationData/Logs"), + ("dir", "sysvol/Windows/System32/config/systemprofile/AppData/Local/ESET/ESET Security/Quarantine"), + # Emsisoft + ("glob", "sysvol/ProgramData/Emsisoft/Reports/scan*.txt"), + # F-Secure + ("dir", 
"sysvol/ProgramData/F-Secure/Log"), + ("dir", "AppData/Local/F-Secure/Log", from_user_home), + ("dir", "sysvol/ProgramData/F-Secure/Antivirus/ScheduledScanReports"), + # HitmanPro + ("dir", "sysvol/ProgramData/HitmanPro/Logs"), + ("dir", "sysvol/ProgramData/HitmanPro.Alert/Logs"), + ("file", "sysvol/ProgramData/HitmanPro.Alert/excalibur.db"), + ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), + # Malwarebytes + ("glob", "sysvol/ProgramData/Malwarebytes/Malwarebytes Anti-Malware/Logs/mbam-log-*.xml"), + ("glob", "sysvol/ProgramData/Malwarebytes/MBAMService/logs/mbamservice.log*"), + ("dir", "AppData/Roaming/Malwarebytes/Malwarebytes Anti-Malware/Logs", from_user_home), + ("dir", "sysvol/ProgramData/Malwarebytes/MBAMService/ScanResults"), + # McAfee + ("dir", "Application Data/McAfee/DesktopProtection", from_user_home), + ("dir", "sysvol/ProgramData/McAfee/DesktopProtection"), + ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs"), + ("dir", "sysvol/ProgramData/McAfee/Endpoint Security/Logs_Old"), + ("dir", "sysvol/ProgramData/Mcafee/VirusScan"), + ("dir", "sysvol/ProgramData/McAfee/MSC/Logs"), + ("dir", "sysvol/ProgramData/McAfee/Agent/AgentEvents"), + ("dir", "sysvol/ProgramData/McAfee/Agent/logs"), + ("dir", "sysvol/ProgramData/McAfee/datreputation/Logs"), + ("dir", "sysvol/ProgramData/Mcafee/Managed/VirusScan/Logs"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Common Framework/AgentEvents"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/MCLOGS/SAE"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/datreputation/Logs"), + ("dir", "sysvol/Documents and Settings/All Users/Application Data/McAfee/Managed/VirusScan/Logs"), + ("dir", "sysvol/Program Files (x86)/McAfee/DLP/WCF Service/Log"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Apache2/Logs"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/DB/Events"), + ("dir", "sysvol/Program 
Files (x86)/McAfee/ePolicy Orchestrator/DB/Events/Debug"), + ("dir", "sysvol/Program Files (x86)/McAfee/ePolicy Orchestrator/Server/Logs"), + # RogueKiller + ("glob", "sysvol/ProgramData/RogueKiller/logs/AdliceReport_*.json"), + # SUPERAntiSpyware + ("dir", "AppData/Roaming/SUPERAntiSpyware/Logs", from_user_home), + # SecureAge + ("dir", "sysvol/ProgramData/SecureAge Technology/SecureAge/log"), + # SentinelOne + ("dir", "sysvol/programdata/sentinel/logs"), + # Sophos + ("glob", "sysvol/Documents and Settings/All Users/Application Data/Sophos/Sophos */Logs"), + ("glob", "sysvol/ProgramData/Sophos/Sophos */Logs"), + # Symantec + ( + "dir", + "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Logs/AV", + ), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Logs"), + ("dir", "AppData/Local/Symantec/Symantec Endpoint Protection/Logs", from_user_home), + ("dir", "sysvol/Windows/System32/winevt/logs/Symantec Endpoint Protection Client.evtx"), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/CmnClnt/ccSubSDK"), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/registrationInfo.xml"), + # TotalAV + ("glob", "sysvol/Program Files*/TotalAV/logs"), + ("dir", "sysvol/ProgramData/TotalAV/logs"), + # Trendmicro + ("glob", "sysvol/Program Files*/Trend Micro"), + # VIPRE + ("dir", "sysvol/ProgramData/VIPRE Business Agent/Logs"), + ("dir", "AppData/Roaming/VIPRE Business", from_user_home), + ("dir", "AppData/Roaming/GFI Software/AntiMalware/Logs", from_user_home), + ("dir", "AppData/Roaming/Sunbelt Software/AntiMalware/Logs", from_user_home), + # Webroot + ("file", "sysvol/ProgramData/WRData/WRLog.log"), + # Microsoft Windows Defender + ("dir", "sysvol/ProgramData/Microsoft/Microsoft AntiMalware/Support"), + ("glob", "sysvol/Windows/System32/winevt/Logs/Microsoft-Windows-Windows Defender*.evtx"), + ("dir", "sysvol/ProgramData/Microsoft/Windows 
Defender/Support"), + ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Scans/History/Service/DetectionHistory"), + ("file", "sysvol/Windows/Temp/MpCmdRun.log"), + ("file", "sysvol/Windows.old/Windows/Temp/MpCmdRun.log"), + ] + + +@register_module("--quarantined") +class QuarantinedFiles(Module): + DESC = "files quarantined by various antivirus products" + SPEC = [ + # Microsoft Defender + # https://knez.github.io/posts/how-to-extract-quarantine-files-from-windows-defender/ + ("dir", "sysvol/ProgramData/Microsoft/Windows Defender/Quarantine"), + # Symantec Endpoint Protection + ( + "dir", + "sysvol/Documents and Settings/All Users/Application Data/Symantec/Symantec Endpoint Protection/Quarantine", + ), + ("glob", "sysvol/ProgramData/Symantec/Symantec Endpoint Protection/*/Data/Quarantine"), + # Trend Micro + # https://secret.inf.ufpr.br/papers/marcus_av_handson.pdf + ("dir", "sysvol/ProgramData/Trend Micro/AMSP/quarantine"), + # McAfee + ("dir", "sysvol/Quarantine"), + ("dir", "sysvol/ProgramData/McAfee/VirusScan/Quarantine"), + # Sophos + ("glob", "sysvol/ProgramData/Sophos/Sophos/*/Quarantine"), + ("glob", "sysvol/ProgramData/Sophos/Sophos */INFECTED"), + ("dir", "sysvol/ProgramData/Sophos/Safestore"), + # HitmanPRO + ("dir", "sysvol/ProgramData/HitmanPro/Quarantine"), + ] + + +@register_module("--edr") +class EDR(Module): + DESC = "various Endpoint Detection and Response (EDR) logs" + SPEC = [ + # Carbon Black + ("dir", "sysvol/ProgramData/CarbonBlack/Logs"), + ] + + +@register_module("--history") +class History(Module): + DESC = "browser history from IE, Edge, Firefox, and Chrome" + DIR_COMBINATIONS = namedtuple("DirCombinations", ["root_dirs", "dir_extensions", "history_files"]) + COMMON_DIR_COMBINATIONS = [ + DIR_COMBINATIONS( + [ + # Chromium - RHEL/Ubuntu - DNF/apt + ".config/chromium", + # Chrome - RHEL/Ubuntu - DNF + ".config/google-chrome", + # Edge - RHEL/Ubuntu - DNF/apt + ".config/microsoft-edge", + # Chrome - RHEL/Ubuntu - Flatpak + 
".var/app/com.google.Chrome/config/google-chrome", + # Edge - RHEL/Ubuntu - Flatpak + ".var/app/com.microsoft.Edge/config/microsoft-edge", + # Chromium - RHEL/Ubuntu - Flatpak + ".var/app/org.chromium.Chromium/config/chromium", + # Chrome + "AppData/Local/Google/Chrom*/User Data", + # Edge + "AppData/Local/Microsoft/Edge/User Data", + "Library/Application Support/Microsoft Edge", + "Local Settings/Application Data/Microsoft/Edge/User Data", + # Chrome - Legacy + "Library/Application Support/Chromium", + "Library/Application Support/Google/Chrome", + "Local Settings/Application Data/Google/Chrom*/User Data", + # Chromium - RHEL/Ubuntu - snap + "snap/chromium/common/chromium", + # Brave - Windows + "AppData/Local/BraveSoftware/Brave-Browser/User Data", + "AppData/Roaming/BraveSoftware/Brave-Browser/User Data", + # Brave - Linux + ".config/BraveSoftware", + # Brave - MacOS + "Library/Application Support/BraveSoftware", + ], + ["*", "Snapshots/*/*"], + [ + "Archived History", + "Bookmarks", + "Cookies*", + "Network", + "Current Session", + "Current Tabs", + "Extension Cookies", + "Favicons", + "History", + "Last Session", + "Last Tabs", + "Login Data", + "Login Data For Account", + "Media History", + "Shortcuts", + "Snapshots", + "Top Sites", + "Web Data", + ], + ), + ] + + SPEC = [ + # IE + ("dir", "AppData/Local/Microsoft/Internet Explorer/Recovery", from_user_home), + ("dir", "AppData/Local/Microsoft/Windows/INetCookies", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/WebCache/*.dat", from_user_home), + # IE - index.dat + ("file", "Cookies/index.dat", from_user_home), + ("file", "Local Settings/History/History.IE5/index.dat", from_user_home), + ("glob", "Local Settings/History/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "Local Settings/Temporary Internet Files/Content.IE5/index.dat", from_user_home), + ("file", "Local Settings/Application Data/Microsoft/Feeds Cache/index.dat", from_user_home), + ("file", 
"AppData/Local/Microsoft/Windows/History/History.IE5/index.dat", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/History/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/index.dat", from_user_home), + ("glob", "AppData/Local/Microsoft/Windows/History/Low/History.IE5/MSHist*/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Content.IE5/index.dat", from_user_home), + ("file", "AppData/Local/Microsoft/Windows/Temporary Internet Files/Low/Content.IE5/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/Cookies/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/Cookies/Low/index.dat", from_user_home), + ("file", "AppData/Roaming/Microsoft/Windows/IEDownloadHistory/index.dat", from_user_home), + # Firefox - Windows + ("glob", "AppData/Local/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + ("glob", "AppData/Roaming/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + ("glob", "Application Data/Mozilla/Firefox/Profiles/*/*.sqlite*", from_user_home), + # Firefox - macOS + ("glob", "/Users/*/Library/Application Support/Firefox/Profiles/*/*.sqlite*"), + # Firefox - RHEL/Ubuntu - Flatpak + ("glob", ".var/app/org.mozilla.firefox/.mozilla/firefox/*/*.sqlite*", from_user_home), + # Firefox - RHEL/Ubuntu - DNF/apt + ("glob", ".mozilla/firefox/*/*.sqlite*", from_user_home), + # Firefox - RHEL/Ubuntu - snap + ("glob", "snap/firefox/common/.mozilla/firefox/*/*.sqlite*", from_user_home), + # Safari - macOS + ("file", "Library/Safari/Bookmarks.plist", from_user_home), + ("file", "Library/Safari/Downloads.plist", from_user_home), + ("file", "Library/Safari/Extensions/Extensions.plist", from_user_home), + ("glob", "Library/Safari/History.*", from_user_home), + ("file", "Library/Safari/LastSession.plist", from_user_home), + ("file", "Library/Caches/com.apple.Safari/Cache.db", from_user_home), + ] + + 
@classmethod + def get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + for root_dirs, extension_dirs, history_files in cls.COMMON_DIR_COMBINATIONS: + for root_dir, extension_dir, history_file in product(root_dirs, extension_dirs, history_files): + full_path = f"{root_dir}/{extension_dir}/{history_file}" + search_type = "glob" if "*" in full_path else "file" + + spec.add((search_type, full_path, from_user_home)) + + return spec + + +@register_module("--remoteaccess") +class RemoteAccess(Module): + DESC = "common remote access tools' log files" + SPEC = [ + # teamviewer + ("glob", "sysvol/Program Files/TeamViewer/*.log"), + ("glob", "sysvol/Program Files (x86)/TeamViewer/*.log"), + ("glob", "/var/log/teamviewer*/*.log"), + ("glob", "AppData/Roaming/TeamViewer/*.log", from_user_home), + ("glob", "Library/Logs/TeamViewer/*.log", from_user_home), + # anydesk - Windows + ("dir", "sysvol/ProgramData/AnyDesk"), + ("glob", "AppData/Roaming/AnyDesk/*.trace", from_user_home), + ("glob", "AppData/Roaming/AnyDesk/*/*.trace", from_user_home), + # anydesk - Mac + Linux + ("glob", ".anydesk*/*.trace", from_user_home), + ("file", "/var/log/anydesk.trace"), + # zoho + ("dir", "sysvol/ProgramData/ZohoMeeting/log"), + ("dir", "AppData/Local/ZohoMeeting/log", from_user_home), + # realvnc + ("file", "sysvol/ProgramData/RealVNC-Service/vncserver.log"), + ("file", "AppData/Local/RealVNC/vncserver.log", from_user_home), + # tightvnc + ("dir", "sysvol/ProgramData/TightVNC/Server/Logs"), + # Remote desktop cache files + ("dir", "AppData/Local/Microsoft/Terminal Server Client/Cache", from_user_home), + ] + + +@register_module("--webhosting") +class WebHosting(Module): + DESC = "Web hosting software log files" + SPEC = [ + # cPanel + ("dir", "/usr/local/cpanel/logs"), + ("file", ".lastlogin", from_user_home), + ] + + +@register_module("--wer") +class WER(Module): + DESC = "WER (Windows Error Reporting) related files" + + @classmethod + def 
get_spec_additions(cls, target: Target, cli_args: argparse.Namespace) -> Iterator[tuple]: + spec = set() + + for wer_dir in itertools.chain( + ["sysvol/ProgramData/Microsoft/Windows/WER"], + from_user_home(target, "AppData/Local/Microsoft/Windows/WER"), + ): + for path in target.fs.path(wer_dir).rglob("*"): + if not path.is_file(): + continue + + if path.stat().st_size >= (1024 * 1024 * 1024): # 1GB + log.debug("Skipping WER file because it exceeds 1GB: %s", path) + continue + + spec.add(("file", path)) + + return spec + + +@register_module("--etc") +class Etc(Module): + SPEC = [ + # In OS-X /etc is a symlink to /private/etc. To prevent collecting + # duplicates, we only use the /etc directory here. + ("dir", "/etc"), + ("dir", "/usr/local/etc"), + ] + + +@register_module("--boot") +class Boot(Module): + SPEC = [ + ("glob", "/boot/config*"), + ("glob", "/boot/efi*"), + ("glob", "/boot/grub*"), + ("glob", "/boot/init*"), + ("glob", "/boot/system*"), + ] + + +def private_key_filter(path: fsutil.TargetPath) -> bool: + if path.is_file() and not path.is_symlink(): + with path.open("rt") as file: + return "PRIVATE KEY" in file.readline() + + +@register_module("--home") +class Home(Module): + SPEC = [ + # Catches most shell related configuration files + ("glob", ".*[akz]sh*", from_user_home), + ("glob", "*/.*[akz]sh*", from_user_home), + # Added to catch any shell related configuration file not caught with the above glob + ("glob", ".*history", from_user_home), + ("glob", "*/.*history", from_user_home), + ("glob", ".*rc", from_user_home), + ("glob", "*/.*rc", from_user_home), + ("glob", ".*_logout", from_user_home), + ("glob", "*/.*_logout", from_user_home), + # Miscellaneous configuration files + ("dir", ".config", from_user_home), + ("glob", "*/.config", from_user_home), + ("file", ".wget-hsts", from_user_home), + ("glob", "*/.wget-hsts", from_user_home), + ("file", ".gitconfig", from_user_home), + ("glob", "*/.gitconfig", from_user_home), + ("file", ".selected_editor", 
from_user_home), + ("glob", "*/.selected_editor", from_user_home), + ("file", ".viminfo", from_user_home), + ("glob", "*/.viminfo", from_user_home), + ("file", ".lesshist", from_user_home), + ("glob", "*/.lesshist", from_user_home), + ("file", ".profile", from_user_home), + ("glob", "*/.profile", from_user_home), + # OS-X home (aka /Users) + ("glob", ".bash_sessions/*", from_user_home), + ("glob", "Library/LaunchAgents/*", from_user_home), + ("glob", "Library/Logs/*", from_user_home), + ("glob", "Preferences/*", from_user_home), + ("glob", "Library/Preferences/*", from_user_home), + ] + + +@register_module("--ssh") +@module_arg("--private-keys", action=argparse.BooleanOptionalAction, help="Add any private keys") +class SSH(Module): + SPEC = [ + ("glob", ".ssh/*", from_user_home), + ("glob", "/etc/ssh/*"), + ("glob", "sysvol/ProgramData/ssh/*"), + ] + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + # Acquire SSH configuration in sshd directories + + filter = None if cli_args.private_keys else private_key_filter + + if filter: + log.info("Executing SSH without --private-keys, skipping private keys.") + + with collector.file_filter(filter): + super().run(target, cli_args, collector) + + +@register_module("--var") +class Var(Module): + SPEC = [ + # In OS-X /var is a symlink to /private/var. To prevent collecting + # duplicates, we only use the /var directory here. 
+ ("dir", "/var/log"), + ("dir", "/var/spool/at"), + ("dir", "/var/spool/cron"), + ("dir", "/var/spool/anacron"), + ("dir", "/var/lib/dpkg/status"), + ("dir", "/var/lib/rpm"), + ("dir", "/var/db"), + ("dir", "/var/audit"), + ("dir", "/var/cron"), + ("dir", "/var/run"), + # some OS-X specific files + ("dir", "/private/var/at"), + ("dir", "/private/var/db/diagnostics"), + ("dir", "/private/var/db/uuidtext"), + ("file", "/private/var/vm/sleepimage"), + ("glob", "/private/var/vm/swapfile*"), + ("glob", "/private/var/folders/*/*/0/com.apple.notificationcenter/*/*"), + # user specific cron on OS-X + ("dir", "/usr/lib/cron"), + ] + + +@register_module("--bsd") +class BSD(Module): + SPEC = [ + ("file", "/bin/freebsd-version"), + ("dir", "/usr/ports"), + ] + + +@register_module("--osx") +class OSX(Module): + DESC = "OS-X specific files and directories" + SPEC = [ + # filesystem events + ("dir", "/.fseventsd"), + # kernel extensions + ("dir", "/Library/Extensions"), + ("dir", "/System/Library/Extensions"), + # logs + ("dir", "/Library/Logs"), + # autorun locations + ("dir", "/Library/LaunchAgents"), + ("dir", "/Library/LaunchDaemons"), + ("dir", "/Library/StartupItems"), + ("dir", "/System/Library/LaunchAgents"), + ("dir", "/System/Library/LaunchDaemons"), + ("dir", "/System/Library/StartupItems"), + # installed software + ("dir", "/Library/Receipts/InstallHistory.plist"), + ("file", "/System/Library/CoreServices/SystemVersion.plist"), + # system preferences + ("dir", "/Library/Preferences"), + # DHCP settings + ("dir", "/private/var/db/dhcpclient/leases"), + ] + + +@register_module("--osx-applications-info") +class OSXApplicationsInfo(Module): + DESC = "OS-X info.plist from all installed applications" + SPEC = [ + ("glob", "/Applications/*/Contents/Info.plist"), + ("glob", "Applications/*/Contents/Info.plist", from_user_home), + ] + + +@register_module("--bootbanks") +class Bootbanks(Module): + DESC = "ESXi bootbanks" + + @classmethod + def _run(cls, target: Target, 
cli_args: argparse.Namespace, collector: Collector) -> None: + # Both ESXi 6 and 7 compatible + boot_dirs = { + "boot": "BOOT", + "bootbank": "BOOTBANK1", + "altbootbank": "BOOTBANK2", + } + boot_fs = {} + + for boot_dir, boot_vol in boot_dirs.items(): + dir_path = target.fs.path(boot_dir) + if dir_path.is_symlink() and dir_path.exists(): + dst = dir_path.readlink() + fs = dst.get().top.fs + boot_fs[fs] = boot_vol + + for fs, mountpoint, uuid, _ in iter_esxi_filesystems(target): + if fs in boot_fs: + name = boot_fs[fs] + log.info("Acquiring %s (%s)", mountpoint, name) + mountpoint_len = len(mountpoint) + base = f"fs/{uuid}:{name}" + for path in target.fs.path(mountpoint).rglob("*"): + outpath = path.as_posix()[mountpoint_len:] + collector.collect_path(path, outpath=outpath, base=base) + + +@register_module("--esxi") +class ESXi(Module): + DESC = "ESXi interesting files" + SPEC = [ + ("dir", "/scratch/log"), + ("dir", "/locker/packages/var"), + # ESXi 7 + ("dir", "/scratch/cache"), + ("dir", "/scratch/vmkdump"), + # ESXi 6 + ("dir", "/scratch/vmware"), + ] + + +@register_module("--vmfs") +class VMFS(Module): + DESC = "ESXi VMFS metadata files" + + @classmethod + def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + for fs, mountpoint, uuid, name in iter_esxi_filesystems(target): + if not fs.__type__ == "vmfs": + continue + + log.info("Acquiring %s (%s)", mountpoint, name) + mountpoint_len = len(mountpoint) + base = f"fs/{uuid}:{name}" + for path in target.fs.path(mountpoint).glob("*.sf"): + outpath = path.as_posix()[mountpoint_len:] + collector.collect_path(path, outpath=outpath, base=base) + + +@register_module("--activities-cache") +class ActivitiesCache(Module): + DESC = "user's activities caches" + SPEC = [ + ("dir", "AppData/Local/ConnectedDevicesPlatform", from_user_home), + ] + + +@register_module("--hashes") +@module_arg( + "--hash-func", + action="append", + type=HashFunc, + choices=[h.value for h in HashFunc], + 
help="Hash function to use", +) +@module_arg("--dir-to-hash", action="append", help="Hash only files in a provided directory") +@module_arg("--ext-to-hash", action="append", help="Hash only files with the extensions provided") +@module_arg("--glob-to-hash", action="append", help="Hash only files that match provided glob") +class FileHashes(Module): + DESC = "file hashes" + + DEFAULT_HASH_FUNCS = (HashFunc.MD5, HashFunc.SHA1, HashFunc.SHA256) + DEFAULT_EXTENSIONS = ( + "bat", + "cmd", + "com", + "dll", + "exe", + "installlog", + "installutil", + "js", + "lnk", + "ps1", + "sys", + "tlb", + "vbs", + ) + DEFAULT_PATHS = ("sysvol/Windows/",) + + MAX_FILE_SIZE_BYTES = 100 * 1024 * 1024 # 100MB + + DEFAULT_FILE_FILTERS = ( + functools.partial(filter_out_by_path_match, re_pattern="^/(sysvol/)?Windows/WinSxS/"), + functools.partial(filter_out_huge_files, max_size_bytes=MAX_FILE_SIZE_BYTES), + functools.partial(filter_out_by_value_match, value=b"MZ", offsets=[0, 3]), + ) + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + log.info("*** Acquiring file hashes") + + specs = cls.get_specs(cli_args) + + with collector.bind_module(cls): + start = time.time() + + path_hashes = collect_hashes(target, specs, path_filters=cls.DEFAULT_FILE_FILTERS) + rows_count, csv_compressed_bytes = serialize_into_csv(path_hashes, compress=True) + + collector.write_bytes( + f"{collector.base}/{collector.METADATA_BASE}/file-hashes.csv.gz", + csv_compressed_bytes, + ) + log.info("Hashing is done, %s files processed in %.2f secs", rows_count, (time.time() - start)) + + @classmethod + def get_specs(cls, cli_args: argparse.Namespace) -> Iterator[tuple]: + path_selectors = [] + + if cli_args.ext_to_hash: + extensions = cli_args.ext_to_hash + else: + extensions = cls.DEFAULT_EXTENSIONS + + if cli_args.dir_to_hash or cli_args.glob_to_hash: + if cli_args.glob_to_hash: + path_selectors.extend([("glob", glob) for glob in cli_args.glob_to_hash]) + + if 
cli_args.dir_to_hash: + path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cli_args.dir_to_hash]) + + else: + path_selectors.extend([("dir", (dir_path, extensions)) for dir_path in cls.DEFAULT_PATHS]) + + if cli_args.hash_func: + hash_funcs = cli_args.hash_func + else: + hash_funcs = cls.DEFAULT_HASH_FUNCS + + return [(path_selector, hash_funcs) for path_selector in path_selectors] + + +@register_module("--handles") +@module_arg( + "--handle-types", + action="extend", + help="Collect only specified handle types", + type=NamedObjectType, + choices=[h.value for h in NamedObjectType], + nargs="*", +) +@local_module +class OpenHandles(Module): + DESC = "Open handles" + + @classmethod + def run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: + if not sys.platform == "win32": + log.error("Open Handles plugin can only run on Windows systems! Skipping...") + return + + from acquire.dynamic.windows.collect import collect_open_handles + from acquire.dynamic.windows.handles import serialize_handles_into_csv + + log.info("*** Acquiring open handles") + + handle_types = cli_args.handle_types + + with collector.bind_module(cls): + handles = collect_open_handles(handle_types) + csv_compressed_handles = serialize_handles_into_csv(handles) + + collector.write_bytes( + f"{collector.base}/{collector.METADATA_BASE}/open_handles.csv.gz", + csv_compressed_handles, + ) + log.info("Collecting open handles is done.") + + +def print_disks_overview(target: Target) -> None: + log.info("// Disks") + try: + for disk in target.disks: + log.info("%s", disk) + if not disk.vs: + continue + + for volume in disk.vs.volumes: + log.info("- %s", volume) + except Exception: + log.error("Failed to iterate disks") + log.info("") + + +def print_volumes_overview(target: Target) -> None: + log.info("// Volumes") + try: + for volume in target.volumes: + log.info("%s", volume) + except Exception: + log.error("Failed to iterate volumes") + log.info("") + + 
+def print_acquire_warning(target: Target) -> None: + if target.os != "windows": + log.warning("========================================== WARNING ==========================================") + log.warning("") + log.warning( + "The support for operating system '%s' is experimental. Some artifacts may not yet be included and some ", + target.os, + ) + log.warning("features may not work as expected. Please notify upstream for any missing artifacts or features.") + log.warning("") + log.warning("========================================== WARNING ==========================================") + + +def _add_modules_for_profile(choice: str, operating_system: str, profile: dict, msg: str) -> Optional[dict]: + modules_selected = dict() + + if choice and choice != "none": + profile_dict = profile[choice] + if operating_system not in profile_dict: + log.error(msg, operating_system, choice) + return None + + for mod in profile_dict[operating_system]: + modules_selected[mod.__modname__] = mod + + return modules_selected + + +def acquire_target(target: Target, args: argparse.Namespace, output_ts: Optional[str] = None) -> list[str | Path]: + acquire_gui = GUI() + files = [] + output_ts = output_ts or get_utc_now_str() + if args.log_to_dir: + log_file = args.log_path.joinpath(format_output_name("Unknown", output_ts, "log")) + # This will also rename the log file on disk, which was opened in main(), if the name is different + reconfigure_log_file(log, log_file, delay=True) + else: + log_file = args.log_path + + skip_list = set() + if log_file: + files.append(log_file) + if target.path.name == "local": + skip_list.add(normalize_path(target, log_file, resolve_parents=True, preserve_case=False)) + + print_disks_overview(target) + print_volumes_overview(target) + + if not target._os_plugin: + log.error("Error: Unable to detect OS") + return files + + hostname = "" + try: + hostname = target.hostname + except Exception: + log.exception("Failed to get hostname") + + version = None + try: 
+ version = target.version + except Exception: + log.exception("Failed to detect OS version") + + if version is None: + os_plugin_name = target._os_plugin.__name__.lower() + version = f"{target.os} ({os_plugin_name})" + + log.info("Target name: %s", target.name) + log.info("Hostname: %s", hostname) + log.info("OS: %s", version) + log.info("") + + print_acquire_warning(target) + + modules_selected = {} + modules_successful = [] + modules_failed = {} + for name, mod in MODULES.items(): + name_slug = name.lower() + # check if module was set in the arguments provided + if getattr(args, name_slug): + modules_selected[name] = mod + + profile = args.profile + + # Set profile to default if no profile, modules, files, directories or globes were selected + if not profile and not modules_selected and not args.file and not args.directory and not args.glob: + log.info("Using default collection profile") + profile = "default" + log.info("") + + profile_modules = _add_modules_for_profile( + profile, target.os, PROFILES, "No collection set for OS %s with profile %s" + ) + + if not (volatile_profile := args.volatile_profile): + volatile_profile = "none" + + volatile_modules = _add_modules_for_profile( + volatile_profile, target.os, VOLATILE, "No collection set for OS %s with volatile profile %s" + ) + + if (profile_modules or volatile_modules) is None: + return files + + modules_selected.update(profile_modules) + modules_selected.update(volatile_modules) + + log.info("Modules selected: %s", ", ".join(sorted(modules_selected))) + + local_only_modules = {name: module for name, module in modules_selected.items() if hasattr(module, "__local__")} + if target.path.name != "local" and local_only_modules: + for name, module in local_only_modules.items(): + modules_failed[module.__name__] = "Not running on a local target" + log.error( + "Can not use local-only modules with non-local targets. 
Skipping: %s", + " ".join(sorted(local_only_modules.keys())), + ) + log.info("") + # Remove local-only modules from the modules list + modules_selected = dict(modules_selected.items() - local_only_modules.items()) + + log_file_handler = get_file_handler(log) + # Prepare log file and output file names + if log_file_handler and args.log_to_dir: + log_file = format_output_name(target.name, output_ts, "log") + # This will also rename the log file on disk, which was opened and written previously. + log_file_handler.set_filename(log_file) + log_path = Path(log_file_handler.baseFilename).resolve() + log.info("Logging to file %s", log_path) + files = [log_file_handler.baseFilename] + if target.path.name == "local": + skip_list = {normalize_path(target, log_path, resolve_parents=True, preserve_case=False)} + + output_path = args.output or args.output_file + if output_path.is_dir(): + output_dir = format_output_name(target.name, output_ts) + output_path = output_path.joinpath(output_dir) + output_path = output_path.resolve() + + output = OUTPUTS[args.output_type]( + output_path, + compress=args.compress, + compression_method=args.compress_method, + encrypt=args.encrypt, + public_key=args.public_key, + ) + files.append(output.path) + if target.path.name == "local": + skip_list.add(normalize_path(target, output.path, resolve_parents=True, preserve_case=False)) + + log.info("Writing output to %s", output.path) + if skip_list: + log.info("Skipping own files: %s", ", ".join(skip_list)) + log.info("") + + dir_base = "fs" + if target.os != "windows": + dir_base = "fs/$rootfs$" + + with Collector(target, output, base=dir_base, skip_list=skip_list) as collector: + # Acquire specified files + if args.file or args.directory or args.glob: + log.info("*** Acquiring specified paths") + spec = [] + + if args.file: + for path in args.file: + spec.append(("file", path.strip())) + + if args.directory: + for path in args.directory: + spec.append(("dir", path.strip())) + + if args.glob: + for 
path in args.glob: + spec.append(("glob", path.strip())) + + collector.collect(spec, module_name=CLI_ARGS_MODULE) + modules_successful.append(CLI_ARGS_MODULE) + log.info("") + + # Run modules (sort first based on execution order) + modules_selected = sorted(modules_selected.items(), key=lambda module: module[1].EXEC_ORDER) + count = 0 + for name, mod in modules_selected: + try: + mod.run(target, args, collector) + + modules_successful.append(mod.__name__) + except Exception: + log.error("Error while running module %s", name, exc_info=True) + modules_failed[mod.__name__] = get_formatted_exception() + + acquire_gui.progress = (acquire_gui.shard // len(modules_selected)) * count + count += 1 + + log.info("") + + collection_report = collector.report + + log.info("Done collecting artifacts:") + + # prepare and render full report only if logging level is more permissive than INFO + if log.level < logging.INFO: + log.debug(get_full_formatted_report(collection_report)) + + log.info(get_report_summary(collection_report)) + + if not args.disable_report: + collection_report_serialized = collection_report.get_records_per_module_per_outcome(serialize_records=True) + + execution_report = { + "target": str(target), + "name": target.name, + "timestamp": get_utc_now().isoformat(), + "modules-successful": modules_successful, + "modules-failed": modules_failed, + **collection_report_serialized, + } + + if args.output: + report_file_name = format_output_name(target.name, postfix=output_ts, ext="report.json") + else: + report_file_name = f"{output_path.name}.report.json" + + report_file_path = output_path.parent / report_file_name + persist_execution_report(report_file_path, execution_report) + + files.append(report_file_path) + log.info("Acquisition report for %s is written to %s", target, report_file_path) + + log.info("Output: %s", output.path) + return files + + +def upload_files(paths: list[str | Path], upload_plugin: UploaderPlugin, no_proxy: bool = False) -> None: + proxies = 
None if no_proxy else urllib.request.getproxies() + log.debug("Proxies: %s (no_proxy = %s)", proxies, no_proxy) + + log.info('Uploading files: "%s"', " ".join(map(str, paths))) + try: + upload_files_using_uploader(upload_plugin, paths, proxies) + except Exception: + log.error('Upload FAILED for files: "%s". See log file for details.', " ".join(map(str, paths))) + raise + else: + log.info("Upload succeeded.") + + +class WindowsProfile: + MINIMAL = [ + NTFS, + EventLogs, + Registry, + Tasks, + PowerShell, + Prefetch, + Appcompat, + PCA, + Misc, + Startup, + ] + DEFAULT = [ + *MINIMAL, + ETL, + Recents, + RecycleBin, + Drivers, + Syscache, + WBEM, + AV, + BITS, + DHCP, + DNS, + ActiveDirectory, + RemoteAccess, + ActivitiesCache, + ] + FULL = [ + *DEFAULT, + History, + NTDS, + QuarantinedFiles, + WindowsNotifications, + SSH, + IIS, + TextEditor, + ] + + +class LinuxProfile: + MINIMAL = [ + Etc, + Boot, + Home, + SSH, + Var, + ] + DEFAULT = MINIMAL + FULL = [ + *DEFAULT, + History, + WebHosting, + ] + + +class BsdProfile: + MINIMAL = [ + Etc, + Boot, + Home, + SSH, + Var, + BSD, + ] + DEFAULT = MINIMAL + FULL = MINIMAL + + +class ESXiProfile: + MINIMAL = [ + Bootbanks, + ESXi, + SSH, + ] + DEFAULT = [ + *MINIMAL, + VMFS, + ] + FULL = DEFAULT + + +class OSXProfile: + MINIMAL = [ + Etc, + Home, + Var, + OSX, + OSXApplicationsInfo, + ] + DEFAULT = MINIMAL + FULL = [ + *DEFAULT, + History, + SSH, + ] + + +PROFILES = { + "full": { + "windows": WindowsProfile.FULL, + "linux": LinuxProfile.FULL, + "bsd": BsdProfile.FULL, + "esxi": ESXiProfile.FULL, + "osx": OSXProfile.FULL, + }, + "default": { + "windows": WindowsProfile.DEFAULT, + "linux": LinuxProfile.DEFAULT, + "bsd": BsdProfile.DEFAULT, + "esxi": ESXiProfile.DEFAULT, + "osx": OSXProfile.DEFAULT, + }, + "minimal": { + "windows": WindowsProfile.MINIMAL, + "linux": LinuxProfile.MINIMAL, + "bsd": BsdProfile.MINIMAL, + "esxi": ESXiProfile.MINIMAL, + "osx": OSXProfile.MINIMAL, + }, + "none": None, +} + + +class VolatileProfile: 
+ DEFAULT = [ + Netstat, + WinProcesses, + WinProcEnv, + WinArpCache, + WinRDPSessions, + WinDnsClientCache, + ] + EXTENSIVE = [ + Proc, + Sys, + ] + + +VOLATILE = { + "default": { + "windows": VolatileProfile.DEFAULT, + "linux": [], + "bsd": [], + "esxi": [], + "osx": [], + }, + "extensive": { + "windows": VolatileProfile.DEFAULT, + "linux": VolatileProfile.EXTENSIVE, + "bsd": VolatileProfile.EXTENSIVE, + "esxi": VolatileProfile.EXTENSIVE, + "osx": [], + }, + "none": None, +} + + +def exit_success(default_args: list[str]): + log.info("Acquire finished successful") + log.info("Arguments: %s", " ".join(sys.argv[1:])) + log.info("Default Arguments: %s", " ".join(default_args)) + log.info("Exiting with status code 0 (SUCCESS)") + sys.exit(0) + + +def exit_failure(default_args: list[str]): + log.error("Acquire FAILED") + log.error("Arguments: %s", " ".join(sys.argv[1:])) + log.error("Default Arguments: %s", " ".join(default_args)) + log.error("Exiting with status code 1 (FAILURE)") + sys.exit(1) + + +def main() -> None: + parser = create_argument_parser(PROFILES, VOLATILE, MODULES) + args, rest = parse_acquire_args(parser, config=CONFIG) + + # Since output has a default value, set it to None when output_file is defined + if args.output_file: + args.output = None + + try: + check_and_set_log_args(args) + except ValueError as err: + parser.exit(err) + + if args.log_to_dir: + # When args.upload files are specified, only these files are uploaded + # and no other action is done. 
Thus a log file specifically named + # Upload_.log is created + file_prefix = "Upload" if args.upload else "Unknown" + log_file = args.log_path.joinpath(format_output_name(file_prefix, args.start_time, "log")) + else: + log_file = args.log_path + + setup_logging(log, log_file, args.verbose, delay=args.log_delay) + + acquire_successful = True + files_to_upload = [log_file] + acquire_gui = None + try: + log.info(ACQUIRE_BANNER) + log.info("User: %s | Admin: %s", get_user_name(), is_user_admin()) + log.info("Arguments: %s", " ".join(sys.argv[1:])) + log.info("Default Arguments: %s", " ".join(args.config.get("arguments"))) + log.info("") + + # start GUI if requested through CLI / config + flavour = None + if args.gui == "always" or ( + args.gui == "depends" and os.environ.get("PYS_KEYSOURCE") == "prompt" and len(sys.argv) == 1 + ): + flavour = platform.system() + acquire_gui = GUI(flavour=flavour, upload_available=args.auto_upload) + + args.output, args.auto_upload, cancel = acquire_gui.wait_for_start(args) + if cancel: + log.info("Acquire cancelled") + exit_success(args.config.get("arguments")) + # From here onwards, the GUI will be locked and cannot be closed because we're acquiring + + plugins_to_load = [("cloud", MinIO)] + upload_plugins = UploaderRegistry("acquire.plugins", plugins_to_load) + + check_and_set_acquire_args(args, upload_plugins) + + if args.upload: + try: + upload_files(args.upload, args.upload_plugin, args.no_proxy) + except Exception as err: + acquire_gui.message("Failed to upload files") + log.exception(err) + exit_failure(args.config.get("arguments")) + exit_success(args.config.get("arguments")) + + target_paths = [] + for target_path in args.targets: + target_path = args_to_uri([target_path], args.loader, rest)[0] if args.loader else target_path + if target_path == "local": + target_query = {} + if args.force_fallback: + target_query.update({"force-directory-fs": 1}) + + if args.fallback: + target_query.update({"fallback-to-directory-fs": 1}) + 
+ target_query = urllib.parse.urlencode(target_query) + target_path = f"{target_path}?{target_query}" + target_paths.append(target_path) + + try: + target_name = "Unknown" # just in case open_all already fails + for target in Target.open_all(target_paths): + target_name = "Unknown" # overwrite previous target name + target_name = target.name + log.info("Loading target %s", target_name) + log.info(target) + if target.os == "esxi" and target.name == "local": + # Loader found that we are running on an esxi host + # Perform operations to "enhance" memory + with esxi_memory_context_manager(): + files_to_upload = acquire_children_and_targets(target, args) + else: + files_to_upload = acquire_children_and_targets(target, args) + except Exception: + log.error("Failed to acquire target: %s", target_name) + if not is_user_admin(): + log.error("Try re-running as administrator/root") + acquire_gui.message("This application must be run as administrator.") + raise + + files_to_upload = sort_files(files_to_upload) + + except Exception as err: + log.error("Acquiring artifacts FAILED") + log.exception(err) + acquire_successful = False + else: + log.info("Acquiring artifacts succeeded") + + try: + # The auto-upload of files is done at the very very end to make sure any + # logged exceptions are written to the log file before uploading. + # This means that any failures from this point on will not be part of the + # uploaded log files, they will be written to the logfile on disk though. 
+ if args.auto_upload and args.upload_plugin and files_to_upload: + try: + log_file_handler = get_file_handler(log) + if log_file_handler: + log_file_handler.close() + + upload_files(files_to_upload, args.upload_plugin) + except Exception: + if acquire_gui: + acquire_gui.message("Failed to upload files") + raise + + if acquire_gui: + acquire_gui.finish() + acquire_gui.wait_for_quit() + + except Exception as err: + acquire_successful = False + log.exception(err) + + if acquire_successful: + exit_success(args.config.get("arguments")) + else: + exit_failure(args.config.get("arguments")) + + +def load_child(target: Target, child_path: Path) -> None: + log.info("") + log.info("Loading child target %s", child_path) + try: + child = target.open_child(child_path) + log.info(target) + except Exception: + log.exception("Failed to load child target") + raise + + return child + + +def acquire_children_and_targets(target: Target, args: argparse.Namespace) -> list[str | Path]: + if args.child: + target = load_child(target, args.child) + + log.info("") + + files = [] + acquire_gui = GUI() + + counter = 0 + progress_limit = 50 if args.auto_upload else 90 + total_targets = 0 + if args.children: + total_targets += len(list(target.list_children())) + + if (args.children and not args.skip_parent) or not args.children: + total_targets += 1 + counter += 1 + acquire_gui.shard = int((progress_limit / total_targets) * counter) + try: + files.extend(acquire_target(target, args, args.start_time)) + + except Exception: + log.error("Failed to acquire main target") + acquire_gui.message("Failed to acquire target") + acquire_gui.wait_for_quit() + raise + + if args.children: + for child in target.list_children(): + counter += 1 + acquire_gui.shard = int((progress_limit / total_targets) * counter) + try: + child_target = load_child(target, child.path) + except Exception: + continue + + log.info("") + + try: + child_files = acquire_target(child_target, args) + files.extend(child_files) + except 
Exception: + log.exception("Failed to acquire child target %s", child_target.name) + acquire_gui.message("Failed to acquire child target") + continue + + return files + + +def sort_files(files: list[Union[str, Path]]) -> list[Path]: + log_files: list[Path] = [] + tar_paths: list[Path] = [] + report_paths: list[Path] = [] + + suffix_map = {".log": log_files, ".json": report_paths} + + for file in files: + if isinstance(file, str): + file = Path(file) + + suffix_map.get(file.suffix, tar_paths).append(file) + + # Reverse log paths, as the first one in ``files`` is the main one. + log_files.reverse() + + return tar_paths + report_paths + log_files + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + sys.exit(1) + except Exception: + sys.exit(1) From a01f0a72f37126a4abea96421d43368edc922e24 Mon Sep 17 00:00:00 2001 From: lhotlan64 <> Date: Thu, 24 Oct 2024 20:48:22 +0200 Subject: [PATCH 5/5] run tox formatter --- acquire/acquire.py | 24 +++++--- acquire/dynamic/windows/arp.py | 88 +++++++++++++++++------------ acquire/dynamic/windows/iphlpapi.py | 2 +- acquire/dynamic/windows/netstat.py | 40 +++++++------ 4 files changed, 87 insertions(+), 67 deletions(-) diff --git a/acquire/acquire.py b/acquire/acquire.py index caa7b786..5297ccb9 100644 --- a/acquire/acquire.py +++ b/acquire/acquire.py @@ -29,17 +29,17 @@ from dissect.util.stream import RunlistStream from acquire.collector import Collector, get_full_formatted_report, get_report_summary -from acquire.dynamic.windows.named_objects import NamedObjectType from acquire.dynamic.windows.arp import ( NetAdapter, - get_windows_network_adapters, + format_net_neighbors_list, get_windows_net_neighbors, - format_net_neighbors_list + get_windows_network_adapters, ) +from acquire.dynamic.windows.named_objects import NamedObjectType from acquire.dynamic.windows.netstat import ( NetConnection, + format_net_connections_list, get_active_connections, - format_net_connections_list ) from acquire.esxi import 
esxi_memory_context_manager from acquire.gui import GUI @@ -395,10 +395,14 @@ class Netstat(Module): def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector) -> None: net_connections: list[NetConnection] = get_active_connections() output = format_net_connections_list(net_connections) - - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + + output_base = ( + fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) + if collector.base + else collector.COMMAND_OUTPUT_BASE + ) full_output_path = fsutil.join(output_base, "netstat") - + collector.output.write_bytes(full_output_path, output.encode()) collector.report.add_command_collected(cls.__name__, ["netstat", "-a", "-n", "-o"]) @@ -442,7 +446,11 @@ def _run(cls, target: Target, cli_args: argparse.Namespace, collector: Collector output = format_net_neighbors_list(neighbors) - output_base = fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) if collector.base else collector.COMMAND_OUTPUT_BASE + output_base = ( + fsutil.join(collector.base, collector.COMMAND_OUTPUT_BASE) + if collector.base + else collector.COMMAND_OUTPUT_BASE + ) full_output_path = fsutil.join(output_base, "arp-cache") collector.output.write_bytes(full_output_path, output.encode()) diff --git a/acquire/dynamic/windows/arp.py b/acquire/dynamic/windows/arp.py index 3d391490..cd383fca 100644 --- a/acquire/dynamic/windows/arp.py +++ b/acquire/dynamic/windows/arp.py @@ -69,22 +69,30 @@ def from_adapter_addresses(addresses: IP_ADAPTER_ADDRESSES) -> NetAdapter: type=type, status=status, ) - + @staticmethod def header_fields() -> list[str]: - return ["Index", "Adapter Name", "Description", "Friendly Name", - "MAC Address", "MTU", "Type", "Operation Status"] - + return [ + "Index", + "Adapter Name", + "Description", + "Friendly Name", + "MAC Address", + "MTU", + "Type", + "Operation Status", + ] + def as_dict(self, indent=0) -> dict: return { - 
'index': self.index, - 'name': self.name, - 'description': self.description, - 'friendly_name': self.friendly_name, - 'mac': self.physical_address, - 'mtu': self.mtu, - 'type': self.type.name, - 'status': self.operation_status.name + "index": self.index, + "name": self.name, + "description": self.description, + "friendly_name": self.friendly_name, + "mac": self.physical_address, + "mtu": self.mtu, + "type": self.type.name, + "status": self.operation_status.name, } def __str__(self) -> str: @@ -97,26 +105,26 @@ def __str__(self) -> str: class NetNeighbor: def __init__( - self, - family: ADDRESS_FAMILY, - address: str, - mac: str | None, - state: NL_NEIGHBOR_STATE, - adapter: NetAdapter | None - ): - self.family: ADDRESS_FAMILY = family - self.address: str = address - self.mac: str | None = mac - self.state: NL_NEIGHBOR_STATE = state - self.adapter: NetAdapter | None = adapter + self, + family: ADDRESS_FAMILY, + address: str, + mac: str | None, + state: NL_NEIGHBOR_STATE, + adapter: NetAdapter | None, + ): + self.family: ADDRESS_FAMILY = family + self.address: str = address + self.mac: str | None = mac + self.state: NL_NEIGHBOR_STATE = state + self.adapter: NetAdapter | None = adapter def as_dict(self) -> dict: return { - 'family': self.family.name, - 'address': self.address, - 'mac': self.mac if self.mac else '', - 'state': self.state.name, - 'adapter': self.adapter.as_dict() + "family": self.family.name, + "address": self.address, + "mac": self.mac if self.mac else "", + "state": self.state.name, + "adapter": self.adapter.as_dict(), } def __str__(self) -> str: @@ -186,8 +194,13 @@ def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: mac = format_physical_address(row.PhysicalAddress, row.PhysicalAddressLength) adapter = get_adapter_by_index(adapters, row.InterfaceIndex) - neighbor = NetNeighbor(family=ADDRESS_FAMILY(row.Address.si_family), address=address, mac=mac, - state=NL_NEIGHBOR_STATE(row.State), adapter=adapter) + neighbor = 
NetNeighbor( + family=ADDRESS_FAMILY(row.Address.si_family), + address=address, + mac=mac, + state=NL_NEIGHBOR_STATE(row.State), + adapter=adapter, + ) neighbors.append(neighbor) FreeMibTable(table_pointer) @@ -197,13 +210,14 @@ def get_windows_net_neighbors(adapters: list[NetAdapter]) -> list[NetNeighbor]: def format_net_neighbors_csv(net_neighbors: list[NetNeighbor]) -> str: def formatter(neighbor: NetNeighbor) -> str: - return f",".join([str(neighbor.adapter.index), neighbor.address, neighbor.mac if neighbor.mac else "", - neighbor.state.name]) - + return f",".join( + [str(neighbor.adapter.index), neighbor.address, neighbor.mac if neighbor.mac else "", neighbor.state.name] + ) + header = ",".join(["interface_index", "ip_address", "mac", "state"]) rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) - return f"{header}\n{rows}" + return f"{header}\n{rows}" def format_net_neighbors_json(net_neighbors: list[NetNeighbor], indent=0) -> str: @@ -214,9 +228,9 @@ def format_net_neighbors_list(net_neighbors: list[NetNeighbor]) -> str: def formatter(neighbor: NetNeighbor) -> str: mac = neighbor.mac if neighbor.mac else "" return f"{neighbor.adapter.index:<10}{neighbor.address:<60}{mac:<20}{neighbor.state.name:<20}" - + header = f"{'ifIndex':<10}{'IP Address':<60}{'MAC Address':<20}{'State':<20}" - header += "\n" + ('=' * len(header)) + header += "\n" + ("=" * len(header)) rows = "\n".join(formatter(neighbor) for neighbor in net_neighbors) return f"{header}\n{rows}" diff --git a/acquire/dynamic/windows/iphlpapi.py b/acquire/dynamic/windows/iphlpapi.py index eadfa91e..82073a43 100644 --- a/acquire/dynamic/windows/iphlpapi.py +++ b/acquire/dynamic/windows/iphlpapi.py @@ -388,7 +388,7 @@ class MIB_IPNET_ROW2(ctypes.Structure): # To correct for this, we add an extra four bytes of padding after the # `InterfaceIndex` member. 
if BITNESS == 32: - _fields_.insert(2, ('Padding', DWORD)) + _fields_.insert(2, ("Padding", DWORD)) class MIB_IPNET_TABLE2(ctypes.Structure): diff --git a/acquire/dynamic/windows/netstat.py b/acquire/dynamic/windows/netstat.py index 7ba4208c..9b3db0d2 100644 --- a/acquire/dynamic/windows/netstat.py +++ b/acquire/dynamic/windows/netstat.py @@ -65,13 +65,13 @@ def __init__( def as_dict(self) -> dict: return { - 'protocol': self.protocol.name, - 'local_address': self.local_address, - 'local_port': self.local_port, - 'remote_address': self.remote_address, - 'remote_port': self.remote_port, - 'state': self.state.name if self.state else None, - 'pid': self.pid + "protocol": self.protocol.name, + "local_address": self.local_address, + "local_port": self.local_port, + "remote_address": self.remote_address, + "remote_port": self.remote_port, + "state": self.state.name if self.state else None, + "pid": self.pid, } def __str__(self) -> str: @@ -195,19 +195,20 @@ def formatter(connection: NetConnection) -> str: rhost = connection.remote_address if connection.remote_address else "" rport = str(connection.remote_port) if connection.remote_port else "" state = connection.state.name if connection.state else "" - return ",".join([connection.protocol.name, connection.local_address, - str(connection.local_port), rhost, rport, state]) - - header = ",".join(["protocol", "local address", "local port", "remote address", - "remote port", "state"]) + return ",".join( + [connection.protocol.name, connection.local_address, str(connection.local_port), rhost, rport, state] + ) + + header = ",".join(["protocol", "local address", "local port", "remote address", "remote port", "state"]) rows = "\n".join(formatter(connection) for connection in net_connections) - return f"{header}\n{rows}" + return f"{header}\n{rows}" def format_net_connections_json(net_connections: list[NetConnection], indent=0) -> str: - return dumps(net_connections, default=lambda connection: connection.as_dict(), - 
indent=indent if indent > 0 else None) + return dumps( + net_connections, default=lambda connection: connection.as_dict(), indent=indent if indent > 0 else None + ) def format_net_connections_list(net_connections: list[NetConnection]) -> str: @@ -222,13 +223,10 @@ def formatter(connection: NetConnection) -> str: else: rconn = "*:*" - return ( - f"{connection.protocol.name:<10}{lconn:<40}{rconn:<40}" - f"{state:<20}{str(connection.pid):<10}" - ) - + return f"{connection.protocol.name:<10}{lconn:<40}{rconn:<40}" f"{state:<20}{str(connection.pid):<10}" + header = f"{'Proto':<10}{'Local Address':<40}{'Foreign Address':<40}{'State':<20}{'PID':<10}" - header += "\n" + ('=' * len(header)) + header += "\n" + ("=" * len(header)) rows = "\n".join(formatter(connection) for connection in net_connections) return f"{header}\n{rows}"