From a4c7c647490832ac4ec937f098399a07300dd4e8 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:28:33 -0300 Subject: [PATCH 01/10] Refactor Discord account Updated to v2. --- scripts/artifacts/discordAcct.py | 100 ++++++++++++++----------------- 1 file changed, 44 insertions(+), 56 deletions(-) diff --git a/scripts/artifacts/discordAcct.py b/scripts/artifacts/discordAcct.py index d3ecfc91f..c37e312a6 100644 --- a/scripts/artifacts/discordAcct.py +++ b/scripts/artifacts/discordAcct.py @@ -1,15 +1,20 @@ -import gzip -import re -import os -import json -import shutil -import errno -from pathlib import Path -import string +__artifacts_v2__ = { + "get_discordAcct": { # This should match the function name exactly + "name": "Discord - Account", + "description": "Parses Discord accounts", + "author": "", + "creation_date": "", + "last_updated": "2025-11-25", + "requirements": "none", + "category": "Discord", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/mmkv/mmkv.default'), + "output_types": "standard", # or ["html", "tsv", "timeline", "lava"] + } +} -from packaging import version -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, logdevinfo, timeline, tsv, is_platform_windows +import string +from scripts.ilapfuncs import artifact_processor def strings(filename, min=4): with open(filename, errors="ignore") as f: # Python 3.x @@ -25,50 +30,33 @@ def strings(filename, min=4): if len(result) >= min: # catch result at EOF yield result -def get_discordAcct(files_found, report_folder, seeker, wrap_text, timezone_offset): - searchlist = [] - for file_found in files_found: - file_found = str(file_found) - - for s in strings(file_found): - #print(type(s)) - #print(s) - searchlist.append(str(s),) - - counter = 0 - data_list = [] - for x in searchlist: - counter += 1 - if 'user_id_cache' in x: - #print(x) - wf = searchlist[counter].split('"') - 
try: - data_list.append(('USER_ID_CACHE', wf[1])) - except: - pass - - if 'email_cache' in x: - #print(x) - wfa = searchlist[counter].split('"') - try: - data_list.append(('EMAIL_CACHE', wfa[1])) - except: - pass +@artifact_processor +def get_discordAcct(context): + data_list = [] + data_headers = ('Key Name', 'Data Value', 'Source File') + for file_found in context.get_files_found(): + file_found = str(file_found) + searchlist = [] + for s in strings(file_found): + searchlist.append(str(s),) - if len(data_list) > 0: - report = ArtifactHtmlReport('Discord Account') - report.start_artifact_report(report_folder, 'Discord Account') - report.add_script() - data_headers = ('Key', 'Value') - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = 'Discord Account' - tsv(report_folder, data_headers, data_list, tsvname) + counter = 0 + for x in searchlist: + counter += 1 + if 'user_id_cache' in x: + wf = searchlist[counter].split('"') + try: + data_list.append(('USER_ID_CACHE', wf[1], file_found)) + except: + pass + + if 'email_cache' in x: + #print(x) + wfa = searchlist[counter].split('"') + try: + data_list.append(('EMAIL_CACHE', wfa[1], file_found)) + except: + pass -__artifacts__ = { - "discordacct": ( - "Discord", - ('*/mobile/Containers/Data/Application/*/Documents/mmkv/mmkv.default'), - get_discordAcct) -} \ No newline at end of file + return data_headers, data_list, 'see Source File for more info' + \ No newline at end of file From 00740023936b2175782a092c01bfa786d0636470 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:29:01 -0300 Subject: [PATCH 02/10] Refactor discordChats Updated the discordChats artifact to use context --- scripts/artifacts/discordChats.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/artifacts/discordChats.py b/scripts/artifacts/discordChats.py index 4ae1fddf8..0b0c9ea20 
100644 --- a/scripts/artifacts/discordChats.py +++ b/scripts/artifacts/discordChats.py @@ -4,7 +4,7 @@ "description": "Parses Discord chat messages from FSCacheData and \'a\' database", "author": "Original Unknown, John Hyla & @stark4n6", "creation_date": "", - "last_updated": "2025-06-23", + "last_updated": "2025-11-25", "requirements": "none", "category": "Discord", "notes": "", @@ -21,12 +21,12 @@ import os import re -from scripts.ilapfuncs import artifact_processor, logfunc, logdevinfo, media_to_html, get_resolution_for_model_id, get_file_path, get_sqlite_db_records +from scripts.ilapfuncs import artifact_processor, logfunc, media_to_html, get_resolution_for_model_id, get_file_path, get_sqlite_db_records @artifact_processor -def discordChats(files_found, report_folder, seeker, wrap_text, timezone_offset): - - pathedhead = pathedtail = '' +def discordChats(context): + files_found = context.get_files_found() + report_folder = context.get_report_folder() def reduceSize(width: int, height: int, max_width: int, max_height: int) -> (int, int): if width > height: @@ -166,7 +166,7 @@ def process_json(jsonfinal): y = y + 1 - pathedhead, pathedtail = os.path.split(pathed) + _, pathedtail = os.path.split(pathed) if timestamp == '': pass @@ -225,7 +225,6 @@ def process_json(jsonfinal): db_records = get_sqlite_db_records(source_path, query) for record in db_records: - attach_name = message_type = call_end = sender_id = '' blob_data = record[0] if len(blob_data) > 1: blob_data = blob_data[1:] @@ -236,7 +235,8 @@ def process_json(jsonfinal): except ValueError as e: pass - #logfunc('JSON error: %s' % e) - data_headers = ('Timestamp','Edited Timestamp','Username','Bot?','Content','Attachments','User ID','Channel ID','Embedded Author','Author URL','Author Icon URL','Embedded URL','Embedded Script','Footer Text', 'Footer Icon URL', 'Source File') + data_headers = (('Timestamp', 'datetime'), ('Edited Timestamp', 'datetime'), 'Username', 'Bot?', 'Content', 'Attachments', + 'User 
ID', 'Channel ID', 'Embedded Author', 'Author URL', 'Author Icon URL', 'Embedded URL', 'Embedded Script', + 'Footer Text', 'Footer Icon URL', 'Source File') return data_headers, data_list, 'See source file(s) below:' \ No newline at end of file From d8e23e3f24d3104728d4bf6b2c9c5aaebf05a9ff Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:29:41 -0300 Subject: [PATCH 03/10] Refactor Discord manifest Updated the Discord manifest to v2. --- scripts/artifacts/discordManifest.py | 47 +++++++++++++--------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/scripts/artifacts/discordManifest.py b/scripts/artifacts/discordManifest.py index 74ac7ad03..9d2dd19fd 100644 --- a/scripts/artifacts/discordManifest.py +++ b/scripts/artifacts/discordManifest.py @@ -1,16 +1,26 @@ +__artifacts_v2__ = { + "get_discordManifest": { # This should match the function name exactly + "name": "Discord - Manifest", + "description": "Parses Discord manifest", + "author": "", + "creation_date": "", + "last_updated": "2025-11-25", + "requirements": "none", + "category": "Discord", + "notes": "", + "paths": ('*/mobile/Containers/Data/Application/*/Documents/RCTAsyncLocalStorage_V1/manifest.json'), + "output_types": "standard", # or ["html", "tsv", "timeline", "lava"] + } +} import os import json -from pathlib import Path +from scripts.ilapfuncs import artifact_processor -from packaging import version -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, logdevinfo, timeline, tsv, is_platform_windows - - -def get_discordManifest(files_found, report_folder, seeker, wrap_text, timezone_offset): +@artifact_processor +def get_discordManifest(context): data_list = [] - for file_found in files_found: + for file_found in context.get_files_found(): file_found = str(file_found) if os.path.isfile(file_found): @@ -19,23 +29,8 @@ def get_discordManifest(files_found, report_folder, seeker, 
wrap_text, timezone_ jsonfinal = json.loads(jsondata) for key, value in jsonfinal.items(): - data_list.append((key, value)) + data_list.append((key, value, file_found)) - - if len(data_list) > 0: - report = ArtifactHtmlReport('Discord Manifest') - report.start_artifact_report(report_folder, 'Discord Manifest') - report.add_script() - data_headers = ('Key', 'Value') - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = 'Discord Manifest' - tsv(report_folder, data_headers, data_list, tsvname) + data_headers = ('Key Name', 'Data Value') -__artifacts__ = { - "discordmanifest": ( - "Discord", - ('*/mobile/Containers/Data/Application/*/Documents/RCTAsyncLocalStorage_V1/manifest.json'), - get_discordManifest) -} \ No newline at end of file + return data_headers, data_list, 'see Source File for more' \ No newline at end of file From edc3757c6f210af428f32ad5cbbd8395735bae55 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:30:24 -0300 Subject: [PATCH 04/10] Refactor DMSS artifact Updated to v2 and split the original DMSS artifact extraction into multiple functions for PINs, channels, device info, registered sensors, registered devices, notifications, and created media. 
--- scripts/artifacts/dmss.py | 613 +++++++++++++++++++++----------------- 1 file changed, 333 insertions(+), 280 deletions(-) diff --git a/scripts/artifacts/dmss.py b/scripts/artifacts/dmss.py index d86dfcc58..54a3a25dd 100644 --- a/scripts/artifacts/dmss.py +++ b/scripts/artifacts/dmss.py @@ -1,15 +1,87 @@ __artifacts_v2__ = { - "Dahua Technology (DMSS)": { + "get_dmss_pin": { "name": "Dahua Technology (DMSS)", - "description": "Extract data from Dahua Technology (DMSS) Application", + "description": "Extract PINs from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", - "version": "0.2", - "date": "2023-11-21", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", "requirements": "none", "category": "Dahua Technology (DMSS)", "notes": "", - "paths": ('*/Library/Support/Devices.sqlite3*','*/Library/Support/configFile1','*/Library/Support/*/DMSSCloud.sqlite*','*/Documents/Captures/*','*/Documents/Videos/*'), - "function": "get_dmss" + "paths": ('*/Library/Support/configFile1',), + "output_types": "standard", + }, + "get_dmss_channels": { + "name": "Dahua Technology (DMSS)", + "description": "Extract channels from Dahua Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Library/Support/Devices.sqlite3*'), + "output_types": "standard", + }, + "get_dmss_info": { + "name": "Dahua Technology (DMSS)", + "description": "Extract info from Dahua Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Library/Support/Devices.sqlite3*'), + "output_types": "standard", + }, + "get_dmss_registered_sensors": { + "name": "Dahua Technology (DMSS)", + "description": "Extract registered sensors from Dahua 
Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Library/Support/*/DMSSCloud.sqlite*'), + "output_types": "standard", + }, + "get_dmss_registered_devices": { + "name": "Dahua Technology (DMSS)", + "description": "Extract registered devices from Dahua Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Library/Support/*/DMSSCloud.sqlite*'), + "output_types": "standard", + }, + "get_dmss_notifications": { + "name": "Dahua Technology (DMSS)", + "description": "Extract notifications from Dahua Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Library/Support/*/DMSSCloud.sqlite*'), + "output_types": "standard", + }, + "get_dmss_created_media": { + "name": "Dahua Technology (DMSS)", + "description": "Extract created media from Dahua Technology (DMSS) Application", + "author": "@theAtropos4n6", + "creation_date": "2023-11-21", + "last_update_date": "2025-11-25", + "requirements": "none", + "category": "Dahua Technology (DMSS)", + "notes": "", + "paths": ('*/Documents/Captures/*','*/Documents/Videos/*'), + "output_types": "standard", } } @@ -42,314 +114,295 @@ import sqlite3 import base64 -import plistlib import os -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly,media_to_html +from scripts.ilapfuncs import logfunc, open_sqlite_db_readonly, check_in_media, \ + artifact_processor, get_plist_content -def get_dmss(files_found, report_folder, seeker, wrap_text, 
timezone_offset): - separator_1 = '/' - separator_2 = "\\" - dmss_db_file_list = [] - media_data_list = [] - for file_found in files_found: +@artifact_processor +def get_dmss_pin(context): + data_list = [] + for file_found in context.get_files_found(): file_found = str(file_found) - file_name = os.path.basename(file_found) - if file_name == 'configFile1': - #-Dahua App's PIN - pin_code = "No Pass" - with open(file_found,'rb') as f: - encoded_data = f.read() - + #-Dahua App's PIN + pin_code = "No Pass" + with open(file_found,'rb') as f: + encoded_data = f.read() decoded_data = base64.b64decode(encoded_data) - - plist_data = plistlib.loads(decoded_data) + plist_data = get_plist_content(decoded_data) for k,v in plist_data.items(): if k == "$objects": if "5" == str(v[3]) and "True" == str(v[4]): pin_code = v[5] #v[5] value is where the PIN gets stored - - if pin_code == "No Pass": - logfunc(f'No PIN was set for Dahua Application') - else: - report = ArtifactHtmlReport('Dahua App PIN') - report.start_artifact_report(report_folder, 'Dahua App PIN') - report.add_script() - data_headers = ('PIN',) - data_list = ((pin_code,),) - #data_list.append(pin_code,) - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Dahua App PIN' - tsv(report_folder,data_headers, data_list, tsvname) - - if file_name == 'Devices.sqlite3': + + data_list.append((pin_code, file_found)) + + data_headers = ('PIN', 'Source File') + return data_headers, data_list, 'see Source File for more info' + +@artifact_processor +def get_dmss_channels(context): + data_headers = ('Device Name','Channel ID','Channel Name', 'Source file') + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + + cursor.execute(''' + select + DEVICES.name, + CHANNELID, + CHANNELS.NAME + from CHANNELS + JOIN DEVICES ON CHANNELS.DEVICEID = DEVICES.ID + ''') + + 
all_rows = cursor.fetchall() + for row in all_rows: + data_list.append((row[0],row[1],row[2], file_found)) + db.close() + + return data_headers, data_list, 'see Source File for more info' + +@artifact_processor +def get_dmss_info(context): + data_headers = ('Name', 'IP/SN/Domain', 'Port', 'User', 'Password (Enc.)', 'DDNS Enabled', 'DDNS Address', 'DDNS Domain', + 'DDNS Server Port', 'DDNS Username', 'DDNS Password (Enc.)', 'DDNS Type', 'DDNS Alias', 'Source File') + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + + cursor.execute(''' + select + DEVICES.NAME, + DEVICES.IP, + DEVICES.PORT, + DEVICES.USER, + DEVICES.PASSWORD, + CASE + when DDNSCONFIG.DDNSENABLE='0' THEN 'False' + when DDNSCONFIG.DDNSENABLE='1' THEN 'True' + END DDNSENABLE, + DDNSCONFIG.DDNSADDRESS, + DDNSCONFIG.DDNSDOMAIN, + DDNSCONFIG.DDNSSERVERPORT, + DDNSCONFIG.DDNSUSERNAME, + DDNSCONFIG.DDNSPASSWORD, + DDNSCONFIG.DDNSTYPE, + DDNSCONFIG.DDNSALIAS + from DEVICES + JOIN DDNSCONFIG ON DEVICES.IP = DDNSCONFIG.DEVICEID + ''') + + all_rows = cursor.fetchall() + + for row in all_rows: + data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], + row[11], row[12], file_found)) + db.close() + + return data_headers, data_list, 'see Source File for more info' + +@artifact_processor +def get_dmss_registered_sensors(context): + separator_1 = '/' + separator_2 = "\\" + dmss_db_file_list = [] + + data_headers = ('Device Name', 'Device Model', 'Device SN', 'Device Type', 'Alarm State', 'Battery Percent', + 'Associated Hub SN', 'Online State', 'Door State Sensor', 'Full Day Alarm', 'Tamper Status', 'Owner', 'Source File') + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) + try: db = open_sqlite_db_readonly(file_found) cursor = db.cursor() - #Dahua CCTV - Channels cursor.execute(''' select - DEVICES.name, - 
CHANNELID, - CHANNELS.NAME - from CHANNELS - JOIN DEVICES ON CHANNELS.DEVICEID = DEVICES.ID + partName, + partModel, + partSN, + paasType, + CASE + when alarmState='0' then 'off' + when alarmState='1' then 'on' + end alarmState, + batteryPercent, + boxSN,--Hub + CASE + when onLineState='0' then 'off' + when onLineState='1' then 'on' + end onLineState, + doorState, + CASE + when fullDayAlarm='0' then 'off' + when fullDayAlarm='1' then 'on' + end fullDayAlarm, + tamper + from GatewayPartTable ''') all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Dahua - CCTV Channels') - report.start_artifact_report(report_folder, 'Dahua - CCTV Channels') - report.add_script() - data_headers = ('Device Name','Channel ID','Channel Name') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Dahua - CCTV Channels' - tsv(report_folder, data_headers, data_list, tsvname) - + db_owner = "N/A" + if separator_1 in file_found: + dmss_db_file_list = file_found.split(separator_1) else: - logfunc(f'No Dahua - CCTV Channels data available') + dmss_db_file_list = file_found.split(separator_2) + db_owner = "(without DMSS account)" if str(dmss_db_file_list[-2]) == "0" else f'(Account- {str(dmss_db_file_list[-2])})' + + for row in all_rows: + data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], db_owner, file_found)) + + db.close() + except sqlite3.OperationalError as e: + logfunc(f'Error - {e}') + + return data_headers, data_list, 'see Source File for more info' + +@artifact_processor +def get_dmss_registered_devices(context): + separator_1 = '/' + separator_2 = "\\" + dmss_db_file_list = [] + data_headers = ('Device Name', 'Device Model', 'Device SN', 'Device Type', 'Channels', 'Online', 'Receive Share From', + 'Send Share To', 
'Username', 'Device Capabilities', 'Sup. Capabilities', 'Channels Capabilities', 'Port', + 'RTSP Port', 'Hardware ID', 'Owner', 'Source File') + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) + try: + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() - #Dahua CCTV - Info cursor.execute(''' select - DEVICES.NAME, - DEVICES.IP, - DEVICES.PORT, - DEVICES.USER, - DEVICES.PASSWORD, - CASE - when DDNSCONFIG.DDNSENABLE='0' THEN 'False' - when DDNSCONFIG.DDNSENABLE='1' THEN 'True' - END DDNSENABLE, - DDNSCONFIG.DDNSADDRESS, - DDNSCONFIG.DDNSDOMAIN, - DDNSCONFIG.DDNSSERVERPORT, - DDNSCONFIG.DDNSUSERNAME, - DDNSCONFIG.DDNSPASSWORD, - DDNSCONFIG.DDNSTYPE, - DDNSCONFIG.DDNSALIAS + name, + serial, + sn, + devType, + channelCount, + deviceIsOnline, + ReceiveShare, + SendShareStr, + userName, + DeviceCS, + SupCaps, + ChannelCS, + mPort, + mRtspPort, + hwId from DEVICES - JOIN DDNSCONFIG ON DEVICES.IP = DDNSCONFIG.DEVICEID - ''') - + ''') + all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Dahua CCTV - Info') - report.start_artifact_report(report_folder, 'Dahua CCTV - Info') - report.add_script() - data_headers = ('Name','IP/SN/Domain','Port','User','Password (Enc.)','DDNS Enabled','DDNS Address','DDNS Domain','DDNS Server Port','DDNS Username','DDNS Password (Enc.)','DDNS Type','DDNS Alias') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Dahua CCTV - Info' - tsv(report_folder, data_headers, data_list, tsvname) - + + if separator_1 in file_found: + dmss_db_file_list = file_found.split(separator_1) else: - logfunc(f'No Dahua CCTV - Info data available') - + dmss_db_file_list = file_found.split(separator_2) + db_owner = "(without DMSS 
account)" if str(dmss_db_file_list[-2]) == "0" else f'(Account- {str(dmss_db_file_list[-2])})' + + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14], db_owner, file_found)) + db.close() + except sqlite3.OperationalError as e: + logfunc(f'Error - {e}') + + return data_headers, data_list, 'see Source File for more info' - if file_name == 'DMSSCloud.sqlite': - try: - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - - #-Dahua IoT - Registered Sensors - cursor.execute(''' - select - partName, - partModel, - partSN, - paasType, - CASE - when alarmState='0' then 'off' - when alarmState='1' then 'on' - end alarmState, - batteryPercent, - boxSN,--Hub - CASE - when onLineState='0' then 'off' - when onLineState='1' then 'on' - end onLineState, - doorState, - CASE - when fullDayAlarm='0' then 'off' - when fullDayAlarm='1' then 'on' - end fullDayAlarm, - tamper - from GatewayPartTable - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - db_owner = "N/A" - if usageentries > 0: - if separator_1 in file_found: - dmss_db_file_list = file_found.split(separator_1) - else: - dmss_db_file_list = file_found.split(separator_2) - db_owner = "(without DMSS account)" if str(dmss_db_file_list[-2]) == "0" else f'(Account- {str(dmss_db_file_list[-2])})' - - report = ArtifactHtmlReport(f'Dahua IoT - Registered Sensors {db_owner}') - report.start_artifact_report(report_folder, f'Dahua IoT - Registered Sensors {db_owner}') - report.add_script() - data_headers = ('Device Name','Device Model','Device SN','Device Type','Alarm State','Battery Percent','Associated Hub SN','Online State','Door State Sensor','Full Day Alarm','Tamper Status') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - 
report.end_artifact_report() - - tsvname = f'Dahua IoT - Registered Sensors {db_owner}' - tsv(report_folder, data_headers, data_list, tsvname) - - else: - logfunc(f'No Dahua IoT - Registered Sensors {db_owner} data available') - - #-Dahua IoT - Registered Devices - cursor.execute(''' - select - name, - serial, - sn, - devType, - channelCount, - deviceIsOnline, - ReceiveShare, - SendShareStr, - userName, - DeviceCS, - SupCaps, - ChannelCS, - mPort, - mRtspPort, - hwId - from DEVICES - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport(f'Dahua IoT - Registered Devices {db_owner}') - report.start_artifact_report(report_folder, f'Dahua IoT - Registered Devices {db_owner}') - report.add_script() - data_headers = ('Device Name','Device Model','Device SN','Device Type','Channels','Online','Receive Share From','Send Share To','Username','Device Capabilities','Sup. Capabilities','Channels Capabilities','Port','RTSP Port','Hardware ID') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Dahua IoT - Registered Devices {db_owner}' - tsv(report_folder, data_headers, data_list, tsvname) - - else: - logfunc(f'No Dahua IoT - Registered Devices {db_owner} data available') - - #-Dahua IoT - Notifications - cursor.execute(''' - select - CHNALARMMESSAGE.TIME, - SensorName, - SensorSN, - AreaName, - NickName, - DEVICES.name, - DEVICEID,--SN - DEVICES.devType, - ALARMID, - case - when CHNALARMMESSAGE.TYPE = 'gwMsg_ATSFault_Start' then 'ATS fault. 
Check network connection' - when CHNALARMMESSAGE.TYPE = 'gwMsg_ATSFault_Stop' then 'ATS restored' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaAlarm_AddArea' then '"area name", added by "nickname"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaAlarm_AreaDelete' then '"area name", removed by "nickname"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaArmModeChange_Remote_DisArm' then '"area name", disarmed by "nickname"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaArmModeChange_Remote_Arm_p1' then '"area name", Home mode activated by "nickname"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_ArmingFailure' then 'Unsuccessful arming "area name" attempt by "nickname"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_PassiveInfrared' then 'Motion detected, "device name" in "area name"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_DoorMagnetism_Start' then 'Opening detected, "device name" in "area name"' - when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_DoorMagnetism_Stop' then 'Closing detected, "device name" in "area name"' - else CHNALARMMESSAGE.TYPE - end TYPE, - case - when ISCHECKED = '1' then 'Yes' - when ISCHECKED = '0' then 'No' - end ISCHECKED - from CHNALARMMESSAGE - JOIN DEVICES ON DEVICES.sn = CHNALARMMESSAGE.DEVICEID - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport(f'Dahua IoT - Notifications {db_owner}') - report.start_artifact_report(report_folder, f'Dahua IoT - Registered Notifications {db_owner}') - report.add_script() - data_headers = ('Timestamp (Local)','Sensor/Area/Nick Name','Sensor SN','Area Name','Nickname','Associated Device Name','Associated Device SN','Associated Device Type','Alarm Message ID','Alarm Notification','Checked') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Dahua IoT - 
Notifications {db_owner}' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity =f'Dahua IoT - Notifications {db_owner}' - timeline(report_folder, tlactivity, data_list, data_headers) +@artifact_processor +def get_dmss_notifications(context): + separator_1 = '/' + separator_2 = "\\" + dmss_db_file_list = [] + data_headers = (('Timestamp (Local)', 'datetime'), 'Sensor/Area/Nick Name', 'Sensor SN', 'Area Name', 'Nickname', + 'Associated Device Name', 'Associated Device SN', 'Associated Device Type', 'Alarm Message ID', + 'Alarm Notification', 'Checked', 'Owner', 'Source File') + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) + try: + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + + cursor.execute(''' + select + CHNALARMMESSAGE.TIME, + SensorName, + SensorSN, + AreaName, + NickName, + DEVICES.name, + DEVICEID,--SN + DEVICES.devType, + ALARMID, + case + when CHNALARMMESSAGE.TYPE = 'gwMsg_ATSFault_Start' then 'ATS fault. 
Check network connection' + when CHNALARMMESSAGE.TYPE = 'gwMsg_ATSFault_Stop' then 'ATS restored' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaAlarm_AddArea' then '"area name", added by "nickname"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaAlarm_AreaDelete' then '"area name", removed by "nickname"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaArmModeChange_Remote_DisArm' then '"area name", disarmed by "nickname"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AreaArmModeChange_Remote_Arm_p1' then '"area name", Home mode activated by "nickname"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_ArmingFailure' then 'Unsuccessful arming "area name" attempt by "nickname"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_PassiveInfrared' then 'Motion detected, "device name" in "area name"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_DoorMagnetism_Start' then 'Opening detected, "device name" in "area name"' + when CHNALARMMESSAGE.TYPE = 'gwMsg_AlarmLocal_DoorMagnetism_Stop' then 'Closing detected, "device name" in "area name"' + else CHNALARMMESSAGE.TYPE + end TYPE, + case + when ISCHECKED = '1' then 'Yes' + when ISCHECKED = '0' then 'No' + end ISCHECKED + from CHNALARMMESSAGE + JOIN DEVICES ON DEVICES.sn = CHNALARMMESSAGE.DEVICEID + ''') + + all_rows = cursor.fetchall() + + if separator_1 in file_found: + dmss_db_file_list = file_found.split(separator_1) + else: + dmss_db_file_list = file_found.split(separator_2) + db_owner = "(without DMSS account)" if str(dmss_db_file_list[-2]) == "0" else f'(Account- {str(dmss_db_file_list[-2])})' - else: - logfunc(f'No Dahua IoT - Notifications {db_owner} data available') - db.close() + for row in all_rows: + data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], db_owner, file_found)) - except sqlite3.OperationalError as e: - logfunc(f'Error - {e}') + db.close() + + except sqlite3.OperationalError as e: + logfunc(f'Error - {e}') + + return data_headers, data_list, 'see Source File for more info' 
+@artifact_processor +def get_dmss_created_media(context): + files_found = context.get_files_found() + report_folder = context.get_report_folder() + + media_data_list = [] + data_headers = ('File Name', ('File Content', 'media'), 'Source File') + for file_found in files_found: + file_found = str(file_found) + file_name = os.path.basename(file_found) #Dahua CCTV - User Created Media: - Collecting Files if file_name.endswith(".jpg") or file_name.endswith(".mp4") or file_name.endswith(".dav"): if file_name.endswith(".jpg") and "Videos" in file_found: #we intentionally left out thumbnails of snapshots and video files to reduce media pass else: temp_tuple = () - temp_tuple = (file_found,file_name,file_name) + temp_tuple = (file_name,file_name,file_found) media_data_list.append(temp_tuple) - #Dahua CCTV - User Created Media: - Reporting Files - media_files = len(media_data_list) - if media_files > 0: - report = ArtifactHtmlReport('Dahua CCTV - User Created Media') - report.start_artifact_report(report_folder, 'Dahua CCTV - User Created Media') - report.add_script() - data_headers = ('File Path','File Name', 'File Content') - data_list = [] - for mfile in media_data_list: - if mfile[2] is not None: - media = media_to_html(mfile[2], files_found, report_folder) - data_list.append((mfile[0],mfile[1],media)) - media_files_dir = "*/mobile/Containers/Data/Application/[Application-GUID]/Documents/Captures/* and */mobile/Containers/Data/Application/[Application-GUID]/Documents/Videos/*" #Generic path of the media files. 
- report.write_artifact_data_table(data_headers, data_list, media_files_dir, html_escape = False) - report.end_artifact_report() - - tsvname = f'Dahua CCTV - User Created Media' - tsv(report_folder, data_headers, data_list, tsvname) - - else: - logfunc(f'No Dahua CCTV - User Created Media data available') + data_list = [] + for mfile in media_data_list: + if mfile[1] is not None: + media = check_in_media(mfile[2], mfile[0]) + data_list.append((mfile[0],media, mfile[2])) + + return data_headers, data_headers, 'see Source File for more info' \ No newline at end of file From 515a2c5f998c02418a26a3348b34e88b1fa9a6a7 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 11:31:40 -0300 Subject: [PATCH 05/10] Refactor draft message updated to v2 --- scripts/artifacts/draftmessage.py | 63 +++++++++++++------------------ 1 file changed, 26 insertions(+), 37 deletions(-) diff --git a/scripts/artifacts/draftmessage.py b/scripts/artifacts/draftmessage.py index de4f70cbb..45b667dcc 100644 --- a/scripts/artifacts/draftmessage.py +++ b/scripts/artifacts/draftmessage.py @@ -1,14 +1,29 @@ +__artifacts_v2__ = { + "get_draftmessage": { # This should match the function name exactly + "name": "Draft Native Messages", + "description": "", + "author": "", + "creation_date": "", + "last_updated": "2025-11-25", + "requirements": "none", + "category": "Messages", + "notes": "", + "paths": ('*/SMS/Drafts/*/composition.plist'), + "output_types": "standard", # or ["html", "tsv", "timeline", "lava"] + "artifact_icon": "message-circle" + } +} + import os -import plistlib import nska_deserialize as nd -import datetime from pathlib import Path -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly +from scripts.ilapfuncs import artifact_processor, convert_unix_ts_in_seconds, get_plist_file_content -def get_draftmessage(files_found, 
report_folder, seeker, wrap_text, timezone_offset): +@artifact_processor +def get_draftmessage(context): data_list = [] - for file_found in files_found: + data_headers = (('Modified Time', 'datetime'),'Intended Recipient','Draft Message', 'Source file') + for file_found in context.get_files_found(): file_found = str(file_found) filename = os.path.basename(file_found) #reusing old code and adding new underneath. I know. "Cringe." path = Path(file_found) @@ -24,36 +39,10 @@ def get_draftmessage(files_found, report_folder, seeker, wrap_text, timezone_off continue modifiedtime = os.path.getmtime(file_found) - modifiedtime = (datetime.datetime.fromtimestamp(int(modifiedtime)).strftime('%Y-%m-%d %H:%M:%S')) - - with open(file_found, 'rb') as fp: - pl = plistlib.load(fp) - deserialized_plist = nd.deserialize_plist_from_string(pl['text']) - data_list.append((modifiedtime, directoryname, deserialized_plist['NSString'])) - - if len(data_list) > 0: - folderlocation = str(path.resolve().parents[1]) - description = '' - report = ArtifactHtmlReport(f'Drafts - Native Messages') - report.start_artifact_report(report_folder, f'Drafts - Native Messages', description) - report.add_script() - data_headers = ('Modified Time','Intended Recipient','Draft Message') - report.write_artifact_data_table(data_headers, data_list, folderlocation) - report.end_artifact_report() + modifiedtime = convert_unix_ts_in_seconds(modifiedtime) - tsvname = f'Drafts - Native Messages' - tsv(report_folder, data_headers, data_list, tsvname) # TODO: _csv.Error: need to escape, but no escapechar set - - tlactivity = f'Drafts - Native Messages' - timeline(report_folder, tlactivity, data_list, data_headers) - - else: - logfunc(f'No data available for Drafts - Native Messages') + pl = get_plist_file_content(file_found) + deserialized_plist = nd.deserialize_plist_from_string(pl['text']) + data_list.append((modifiedtime, directoryname, deserialized_plist.get('NSString', ''), file_found)) - -__artifacts__ = { - 
"draftmessage": ( - "Draft Native Messages", - ('*/SMS/Drafts/*/composition.plist'), - get_draftmessage) -} \ No newline at end of file + return data_headers, data_list, 'see Source File for more info' \ No newline at end of file From 56731f8f7205aecf97db4f036f23a805d7880781 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:23:01 -0300 Subject: [PATCH 06/10] discordAcct.py Update error handling by logging specific exceptions during parsing of user_id_cache and email_cache entries. Remove unnecessary pass statements --- scripts/artifacts/discordAcct.py | 35 ++++++++++++++++---------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/scripts/artifacts/discordAcct.py b/scripts/artifacts/discordAcct.py index c37e312a6..cb05d8b9f 100644 --- a/scripts/artifacts/discordAcct.py +++ b/scripts/artifacts/discordAcct.py @@ -14,21 +14,22 @@ } import string -from scripts.ilapfuncs import artifact_processor +from scripts.ilapfuncs import artifact_processor, logfunc -def strings(filename, min=4): - with open(filename, errors="ignore") as f: # Python 3.x +def strings(filename, min_length=4): + with open(filename, "rb") as f: # Python 3.x # with open(filename, "rb") as f: # Python 2.x - result = "" - for c in f.read(): - if c in string.printable: - result += c - continue - if len(result) >= min: - yield result - result = "" - if len(result) >= min: # catch result at EOF - yield result + result = "" + for c in f.read(): + char = chr(c) + if char in string.printable: + result += char + continue + if len(result) >= min_length: + yield result + result = "" + if len(result) >= min_length: # catch result at EOF + yield result @artifact_processor def get_discordAcct(context): @@ -47,16 +48,16 @@ def get_discordAcct(context): wf = searchlist[counter].split('"') try: data_list.append(('USER_ID_CACHE', wf[1], file_found)) - except: - pass + except (IndexError, TypeError) as e: + logfunc(f"Error parsing 
user_id_cache: {str(e)}") if 'email_cache' in x: #print(x) wfa = searchlist[counter].split('"') try: data_list.append(('EMAIL_CACHE', wfa[1], file_found)) - except: - pass + except (IndexError, TypeError) as e: + logfunc(f"Error parsing email_cache: {str(e)}") return data_headers, data_list, 'see Source File for more info' \ No newline at end of file From 8b63d138e645a7111b23d664ebb6d850c83e9d4c Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:24:07 -0300 Subject: [PATCH 07/10] discordChats.py logging for JSON parsing errors Removes unnecessary pass statements and f-strings --- scripts/artifacts/discordChats.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/scripts/artifacts/discordChats.py b/scripts/artifacts/discordChats.py index 0b0c9ea20..c2dad7a77 100644 --- a/scripts/artifacts/discordChats.py +++ b/scripts/artifacts/discordChats.py @@ -192,15 +192,15 @@ def process_json(jsonfinal): model_id = parsed_data.get('ProductType') if not model_id: - logfunc(f"Cannot detect model ID. Cannot link attachments") + logfunc("Cannot detect model ID. Cannot link attachments") break resolution = get_resolution_for_model_id(model_id) break if not activation_record_found: - logfunc(f'activation_record.plist not found. Unable to determine model/resolution for attachment linking') + logfunc('activation_record.plist not found. 
Unable to determine model/resolution for attachment linking') if not resolution: - logfunc(f"Cannot link attachments due to missing resolution") + logfunc("Cannot link attachments due to missing resolution") data_list = [] @@ -217,11 +217,8 @@ def process_json(jsonfinal): if isinstance(jsonfinal, list): jsonfinal = jsonfinal[0] process_json(jsonfinal) - else: - pass elif source_path: query = '''select data from messages0''' - #data_headers = (('Message Timestamp', 'datetime'), ('Edited Timestamp', 'datetime'),'Sender Username','Sender Global Name','Sender ID','Message','Attachment(s)','Message Type','Call Ended','Message ID','Channel ID',) db_records = get_sqlite_db_records(source_path, query) for record in db_records: @@ -234,7 +231,7 @@ def process_json(jsonfinal): process_json(jsonfinal) except ValueError as e: - pass + logfunc(f"Error parsing JSON from {file_found}: {str(e)}") data_headers = (('Timestamp', 'datetime'), ('Edited Timestamp', 'datetime'), 'Username', 'Bot?', 'Content', 'Attachments', 'User ID', 'Channel ID', 'Embedded Author', 'Author URL', 'Author Icon URL', 'Embedded URL', 'Embedded Script', From f20f76eb3e7f6f7fd381083f84cfaebdae8cc844 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:25:14 -0300 Subject: [PATCH 08/10] discordManifest Added UTF-8 encoding when opening files and ensured data is only appended if JSON is successfully loaded. 
--- scripts/artifacts/discordManifest.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/scripts/artifacts/discordManifest.py b/scripts/artifacts/discordManifest.py index 9d2dd19fd..0bcf34d52 100644 --- a/scripts/artifacts/discordManifest.py +++ b/scripts/artifacts/discordManifest.py @@ -19,18 +19,20 @@ @artifact_processor def get_discordManifest(context): - data_list = [] - for file_found in context.get_files_found(): - file_found = str(file_found) - - if os.path.isfile(file_found): - with open(file_found) as f_in: - for jsondata in f_in: - jsonfinal = json.loads(jsondata) + data_list = [] + for file_found in context.get_files_found(): + file_found = str(file_found) - for key, value in jsonfinal.items(): - data_list.append((key, value, file_found)) + jsonfinal = None + if os.path.isfile(file_found): + with open(file_found, encoding='utf-8') as f_in: + for jsondata in f_in: + jsonfinal = json.loads(jsondata) - data_headers = ('Key Name', 'Data Value') - - return data_headers, data_list, 'see Source File for more' \ No newline at end of file + if jsonfinal: + for key, value in jsonfinal.items(): + data_list.append((key, value, file_found)) + + data_headers = ('Key Name', 'Data Value') + + return data_headers, data_list, 'see Source File for more' \ No newline at end of file From 011ba350c057a7a2b51cf1b44cfb1e8a0eeddca6 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:25:49 -0300 Subject: [PATCH 09/10] DMSS --- scripts/artifacts/dmss.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/scripts/artifacts/dmss.py b/scripts/artifacts/dmss.py index 54a3a25dd..ec73fa61c 100644 --- a/scripts/artifacts/dmss.py +++ b/scripts/artifacts/dmss.py @@ -1,6 +1,6 @@ __artifacts_v2__ = { "get_dmss_pin": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - PIN", "description": "Extract PINs from Dahua Technology (DMSS) 
Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -12,7 +12,7 @@ "output_types": "standard", }, "get_dmss_channels": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Channels", "description": "Extract channels from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -24,7 +24,7 @@ "output_types": "standard", }, "get_dmss_info": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Info", "description": "Extract info from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -36,7 +36,7 @@ "output_types": "standard", }, "get_dmss_registered_sensors": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Sensors", "description": "Extract registered sensors from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -48,7 +48,7 @@ "output_types": "standard", }, "get_dmss_registered_devices": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Devices", "description": "Extract registered devices from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -60,7 +60,7 @@ "output_types": "standard", }, "get_dmss_notifications": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Notifications", "description": "Extract notifications from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -72,7 +72,7 @@ "output_types": "standard", }, "get_dmss_created_media": { - "name": "Dahua Technology (DMSS)", + "name": "Dahua Technology (DMSS) - Media", "description": "Extract created media from Dahua Technology (DMSS) Application", "author": "@theAtropos4n6", "creation_date": "2023-11-21", @@ -383,7 +383,6 @@ def get_dmss_notifications(context): @artifact_processor def get_dmss_created_media(context): files_found 
= context.get_files_found() - report_folder = context.get_report_folder() media_data_list = [] data_headers = ('File Name', ('File Content', 'media'), 'Source File') From 30c6964243d7cd32da05a7dd003438a2315fe776 Mon Sep 17 00:00:00 2001 From: Maite Nigro <86933780+Maite2003@users.noreply.github.com> Date: Sun, 4 Jan 2026 20:47:27 -0300 Subject: [PATCH 10/10] add encoding to discord chats --- scripts/artifacts/discordChats.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/artifacts/discordChats.py b/scripts/artifacts/discordChats.py index c2dad7a77..3eec84629 100644 --- a/scripts/artifacts/discordChats.py +++ b/scripts/artifacts/discordChats.py @@ -211,7 +211,7 @@ def process_json(jsonfinal): try: if not file_found.endswith('activation_record.plist') and os.path.isfile(file_found) and file_found != source_path: - with open(file_found, "r") as f_in: + with open(file_found, "r", encoding="utf-8") as f_in: for jsondata in f_in: jsonfinal = json.loads(jsondata) if isinstance(jsonfinal, list):