diff --git a/scripts/artifacts/LinkedIn.py b/scripts/artifacts/LinkedIn.py
index 00bff608..de6c8c40 100644
--- a/scripts/artifacts/LinkedIn.py
+++ b/scripts/artifacts/LinkedIn.py
@@ -1,9 +1,6 @@
-# LinkedIn App (com.linkedin.android)
-# Author: Marco Neumann (kalinko@be-binary.de)
-# Version: 0.0.1
-#
 # Tested with the following versions:
-# 2024-08-16: Android 14, App: 4.1966
+# 2024-08-16: Android 14, App: 4.1.966
+# 2026-02-07: Android 16, App: 4.1.1166
 
 # Requirements: json, xml
 
@@ -11,25 +8,27 @@
 __artifacts_v2__ = {
-    "get_linkedin_account": {
+    "linkedin_account": {
         "name": "LinkedIn - Account",
         "description": "Existing account in LinkedIn App. The Public Identifier can be used to visit the public profile on the LinkedIn Website (https://www.linkedin.com/in/[Public Identifier]).",
         "author": "Marco Neumann {kalinko@be-binary.de}",
-        "version": "0.0.1",
-        "date": "2025-04-26",
-        "requirements": "xml",
+        "version": "0.1",
+        "creation_date": "2025-04-26",
+        "last_update_date": "2026-02-07",
+        "requirements": "xml, json",
         "category": "LinkedIn",
         "notes": "",
         "paths": ('*/com.linkedin.android/shared_prefs/linkedInPrefsName.xml'),
         "output_types": "standard",
         "artifact_icon": "user"
     },
-    "get_linkedin_messages": {
+    "linkedin_messages": {
         "name": "LinkedIn - Messages",
         "description": "Messages sent and received from LinkedIn App",
         "author": "Marco Neumann {kalinko@be-binary.de}",
-        "version": "0.0.1",
-        "date": "2025-04-26",
+        "version": "0.1",
+        "creation_date": "2025-04-26",
+        "last_update_date": "2026-02-07",
         "requirements": "",
         "category": "LinkedIn",
         "notes": "",
@@ -39,14 +38,13 @@
     }
 }
 
-
 import json
 import xml.etree.ElementTree as ET
 
 from scripts.ilapfuncs import artifact_processor, convert_unix_ts_to_utc, get_sqlite_db_records
 
 
 @artifact_processor
-def get_linkedin_account(files_found, report_folder, seeker, wrap_text):
+def linkedin_account(files_found, _report_folder, _seeker, _wrap_text):
     # Get data from xml into a dict to work with
     xml_dict = {}
@@ -74,21 +72,21 @@ def get_linkedin_account(files_found, report_folder, seeker, wrap_text):
     first_name = temp_meModel['miniProfile']['firstName']
     headline = temp_meModel['miniProfile']['occupation']
     public_identifier = temp_meModel['miniProfile']['publicIdentifier']
 
-    data_list = [(last_login, member_id, last_name, first_name, headline, public_identifier)]
+    data_list = [(last_login, member_id, account_mail, last_name, first_name, headline, public_identifier)]
 
-    data_headers = ('Last Login', 'Member ID', 'Last Name', 'First Name', 'Headline', 'Public Identifier')
+    data_headers = ('Last Login', 'Member ID', 'Account Mail', 'Last Name', 'First Name', 'Headline', 'Public Identifier')
 
     return data_headers, data_list, files_found[0]
 
 
 @artifact_processor
-def get_linkedin_messages(files_found, report_folder, seeker, wrap_text):
+def linkedin_messages(files_found, _report_folder, _seeker, _wrap_text):
     files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
 
     query = ('''
         SELECT
-            strftime('%Y-%m-%d %H:%M:%S.', "md"."deliveredAt"/1000, 'unixepoch') || ("md"."deliveredAt"%1000) [deliveredAt],
+            md.deliveredAt [deliveredAt],
             CASE
                 WHEN md.status = '5' THEN 'Delivered'
                 ELSE 'Unknown'
@@ -109,7 +107,7 @@ def get_linkedin_messages(files_found, report_folder, seeker, wrap_text):
     data_list = []
 
     for row in db_records:
-        delivery_date = row[0]
+        delivery_date = convert_unix_ts_to_utc(int(row[0])/1000)
         delivery_status = row[1]
         sender_firstname = row[2]
         sender_lastname = row[3]
diff --git a/scripts/artifacts/RandoChat.py b/scripts/artifacts/RandoChat.py
new file mode 100644
index 00000000..5708540c
--- /dev/null
+++ b/scripts/artifacts/RandoChat.py
@@ -0,0 +1,228 @@
+# Android RandoChat App (com.random.chat.app)
+
+# Tested Version: 6.3.3
+
+
+__artifacts_v2__ = {
+    'randochat_messages': {
+        'name': 'RandoChat Messages',
+        'description': 'Parses RandoChat App Messages',
+        'author': 'Marco Neumann {kalinko@be-binary.de}',
+        'version': '0.0.1',
+        'creation_date': '2026-01-15',
+        'last_update_date': '2026-01-15',
+        'requirements': 'os, path',
+        'category': 'Chats',
+        'notes': '',
+        'paths': (
+            '*/data/com.random.chat.app/databases/ramdochatV2.db*',
+            '*/Android/data/com.random.chat.app/files/Pictures/RandoChat/*',
+            '*/Android/data/com.random.chat.app/files/images/*',
+            '*/Android/data/com.random.chat.app/files/Music/RandoChat/*'
+        ),
+        'output_types': 'standard',
+        'artifact_icon': 'message-square',
+        'html_columns': ['Media File']
+    },
+    'randochat_account': {
+        'name': 'RandoChat Accounts',
+        'description': 'Parses RandoChat App Accounts',
+        'author': 'Marco Neumann {kalinko@be-binary.de}',
+        'version': '0.0.1',
+        'creation_date': '2026-01-15',
+        'last_update_date': '2026-01-15',
+        'requirements': '',
+        'category': 'Accounts',
+        'notes': '',
+        'paths': (
+            '*/data/com.random.chat.app/databases/ramdochatV2.db*'
+        ),
+        'output_types': 'standard',
+        'artifact_icon': 'user'
+    },
+    'randochat_contacts': {
+        'name': 'RandoChat Contacts',
+        'description': 'Parses RandoChat App Contacts',
+        'author': 'Marco Neumann {kalinko@be-binary.de}',
+        'version': '0.0.1',
+        'creation_date': '2026-01-15',
+        'last_update_date': '2026-01-15',
+        'requirements': '',
+        'category': 'Contacts',
+        'notes': '',
+        'paths': (
+            '*/data/com.random.chat.app/databases/ramdochatV2.db*'
+        ),
+        'output_types': 'standard',
+        'artifact_icon': 'user'
+    }
+}
+
+import os
+
+from scripts.ilapfuncs import artifact_processor, convert_unix_ts_to_utc, get_sqlite_db_records, media_to_html
+
+
+@artifact_processor
+def randochat_messages(files_found, report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    # Get the different files found and store their paths in corresponding lists to work with them
+    main_db = ''
+    attachments = []
+
+    for file_found in files_found:
+        file_found = str(file_found)
+
+        if file_found.endswith('ramdochatV2.db'):
+            main_db = file_found
+
+        if 'files' in os.path.dirname(file_found):
+            attachments.append(file_found)
+
+    query = '''
+    SELECT
+        m.hora [Timestamp],
+        m.mensagem [Message Content],
+        c.apelido [Contact Username],
+        m.minha [Sent?], -- 1 = sent, 2 = received
+        m.url [Media File],
+        m.id_talk_server [Conversation ID],
+        m.id_servidor [Message ID]
+    FROM mensagens m
+    LEFT JOIN conversa c ON c.id_server = m.id_talk_server
+    '''
+
+    db_records = get_sqlite_db_records(main_db, query)
+    data_list = []
+
+    for row in db_records:
+        timestamp = convert_unix_ts_to_utc(int(row[0])/1000)
+        content = row[1]
+        contact_name = row[2]
+        direction = row[3]
+        media_file = row[4]
+        conv_id = row[5]
+        message_id = row[6]
+
+        # Handling attachments
+        attachment = ''
+        if media_file is not None:
+            for att_path in attachments:
+                if os.path.basename(media_file) in os.path.basename(att_path):
+                    attachment = media_to_html(os.path.basename(att_path), attachments, report_folder)
+
+        data_list.append((timestamp, content, contact_name, direction, attachment, conv_id, message_id))
+
+    data_headers = ('Timestamp', 'Content', 'Contact Username', 'Sent?', 'Media File', 'Conversation ID', 'Message ID')
+
+    return data_headers, data_list, main_db
+
+
+@artifact_processor
+def randochat_account(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    # Get the different files found and store their paths in corresponding lists to work with them
+    main_db = ''
+
+    for file_found in files_found:
+        file_found = str(file_found)
+
+        if file_found.endswith('ramdochatV2.db'):
+            main_db = file_found
+
+    query = '''
+    SELECT
+        MAX(CASE WHEN name LIKE 'apelido' THEN value END) [Username],
+        MAX(CASE WHEN name LIKE 'sexo' THEN (CASE WHEN value = 'H' THEN 'Male' WHEN value = 'M' THEN 'Female' END) END) [User Sex],
+        MAX(CASE WHEN name LIKE 'idade' THEN value END) [User Age],
+        MAX(CASE WHEN name LIKE 'language' THEN value END) [Language],
+        MAX(CASE WHEN name LIKE 'device_id' THEN value END) [Device ID],
+        MAX(CASE WHEN name LIKE 'idade_de' THEN value END) [Preferred Age From],
+        MAX(CASE WHEN name LIKE 'idade_ate' THEN value END) [Preferred Age To],
+        MAX(CASE WHEN name LIKE 'sexo_search' THEN (CASE WHEN value = 'H' THEN 'Male' WHEN value = 'M' THEN 'Female' END) END) [Preferred Sex]
+    FROM configuracao
+    '''
+
+    db_records = get_sqlite_db_records(main_db, query)
+    data_list = []
+
+    for row in db_records:
+        username = row[0]
+        sex = row[1]
+        age = row[2]
+        language = row[3]
+        device_id = row[4]
+        age_from = row[5]
+        age_to = row[6]
+        preferred_sex = row[7]
+
+        data_list.append((username, sex, age, language, device_id, age_from, age_to, preferred_sex))
+
+    data_headers = ('Username', 'User Sex', 'User Age', 'Language', 'Device ID', 'Preferred Age From', 'Preferred Age To', 'Preferred Sex')
+
+    return data_headers, data_list, main_db
+
+
+@artifact_processor
+def randochat_contacts(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+    main_db = ''
+
+    for file_found in files_found:
+        file_found = str(file_found)
+
+        if file_found.endswith('ramdochatV2.db'):
+            main_db = file_found
+
+    query = '''
+    SELECT
+        c.id_pessoa [Account ID],
+        c.apelido [Username],
+        c.idade [Age],
+        CASE
+            WHEN c.sexo = 'M' THEN 'Female' -- From Mulher
+            WHEN c.sexo = 'H' THEN 'Male' -- From Homem
+        END [Sex],
+        CASE
+            WHEN c.favorite = 1 THEN 'Yes'
+            WHEN c.favorite = 0 THEN 'No'
+        END [Favorite?],
+        CASE
+            WHEN c.bloqueado = 1 THEN 'Yes'
+            WHEN c.bloqueado = 0 THEN 'No'
+        END [Blocked?],
+        CASE
+            WHEN c.images = '' THEN 'n/a'
+            ELSE json_extract(c.images, '$[0].img')
+        END [Link Profile Pic]
+    FROM conversa c
+    '''
+
+    db_records = get_sqlite_db_records(main_db, query)
+    data_list = []
+
+    for row in db_records:
+        account_id = row[0]
+        username = row[1]
+        age = row[2]
+        sex = row[3]
+        favorite = row[4]
+        blocked = row[5]
+        profile_pic = row[6]
+
+        data_list.append((account_id, username, age, sex, favorite, blocked, profile_pic))
+
+    data_headers = ('Account ID', 'Username', 'Age', 'Sex', 'Favorite?', 'Blocked?', 'Link Profile Pic')
+
+    return data_headers, data_list, main_db
\ No newline at end of file
diff --git a/scripts/artifacts/SamsungDeviceHealthManagement.py b/scripts/artifacts/SamsungDeviceHealthManagement.py
new file mode 100644
index 00000000..505a189b
--- /dev/null
+++ b/scripts/artifacts/SamsungDeviceHealthManagement.py
@@ -0,0 +1,197 @@
+# Android Samsung Device Health Management Service SDHMS (com.sec.android.sdhms)
+# Author: Marco Neumann (kalinko@be-binary.de)
+#
+
+# Requirements:
+
+__artifacts_v2__ = {
+
+    "sdhms_config_reloads": {
+        "name": "SDHMS Config Reload History",
+        "description": "SDHMS Config Reload History - Shows e.g. Reboot of Device. More info: https://bebinary4n6.blogspot.com/2026/01/inside-android-samsung-dhms-extracting.html",
+        "author": "Marco Neumann {kalinko@be-binary.de}",
+        "version": "0.0.1",
+        "creation_date": "2026-01-10",
+        "last_update_date": "2026-01-10",
+        "requirements": "",
+        "category": "Samsung Device Health Management Service",
+        "notes": "",
+        "paths": ('*/com.sec.android.sdhms/databases/anomaly.db*'),
+        "artifact_icon": "settings"
+    },
+    "sdhms_netstat": {
+        "name": "SDHMS Netstat",
+        "description": "SDHMS Network Usage per App. More info: https://bebinary4n6.blogspot.com/2026/01/inside-android-samsung-dhms-extracting.html",
+        "author": "Marco Neumann {kalinko@be-binary.de}",
+        "version": "0.0.1",
+        "creation_date": "2026-01-10",
+        "last_update_date": "2026-01-10",
+        "requirements": "",
+        "category": "Samsung Device Health Management Service",
+        "notes": "",
+        "paths": ('*/com.sec.android.sdhms/databases/thermal_log*'),
+        "artifact_icon": "bar-chart"
+    },
+    "sdhms_temperature": {
+        "name": "SDHMS Temperature Logs",
+        "description": "SDHMS Temperature Logs per Sensor in degrees Celsius. More info: https://bebinary4n6.blogspot.com/2026/01/inside-android-samsung-dhms-extracting.html",
+        "author": "Marco Neumann {kalinko@be-binary.de}",
+        "version": "0.0.1",
+        "creation_date": "2026-01-10",
+        "last_update_date": "2026-01-10",
+        "requirements": "",
+        "category": "Samsung Device Health Management Service",
+        "notes": "",
+        "paths": ('*/com.sec.android.sdhms/databases/thermal_log*'),
+        "artifact_icon": "thermometer"
+    },
+    "sdhms_cpustats": {
+        "name": "SDHMS CPU Stats",
+        "description": "SDHMS CPU Usage per Process. More info: https://bebinary4n6.blogspot.com/2026/01/inside-android-samsung-dhms-extracting.html",
+        "author": "Marco Neumann {kalinko@be-binary.de}",
+        "version": "0.0.1",
+        "creation_date": "2026-01-10",
+        "last_update_date": "2026-01-10",
+        "requirements": "",
+        "category": "Samsung Device Health Management Service",
+        "notes": "",
+        "paths": ('*/com.sec.android.sdhms/databases/thermal_log*'),
+        "artifact_icon": "cpu"
+    }
+
+}
+
+from scripts.ilapfuncs import artifact_processor, convert_unix_ts_to_utc, get_sqlite_db_records
+
+
+@artifact_processor
+def sdhms_config_reloads(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    query = ('''
+    SELECT
+        time,
+        reason,
+        config_key,
+        config_version
+    FROM config_history
+    ''')
+
+    data_list = []
+
+    db_records = get_sqlite_db_records(str(files_found[0]), query)
+
+    for row in db_records:
+        config_reload_time = convert_unix_ts_to_utc(int(row[0])/1000)
+        reason = row[1]
+        config_key = row[2]
+        config_version = row[3]
+
+        data_list.append((config_reload_time, reason, config_key, config_version))
+
+    data_headers = ('Config Reload Time', 'Config Reload Reason', 'Config Key', 'Config Version')
+
+    return data_headers, data_list, files_found[0]
+
+
+@artifact_processor
+def sdhms_netstat(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    query = ('''
+    SELECT
+        start_time,
+        end_time,
+        id,
+        package_name,
+        uid,
+        net_usage
+    FROM NETSTAT
+    ''')
+
+    data_list = []
+
+    db_records = get_sqlite_db_records(str(files_found[0]), query)
+
+    for row in db_records:
+        start_time = convert_unix_ts_to_utc(int(row[0])/1000)
+        end_time = convert_unix_ts_to_utc(int(row[1])/1000)
+        entry_id = row[2]
+        package_name = row[3]
+        package_uid = row[4]
+        net_usage = row[5]
+
+        data_list.append((start_time, end_time, entry_id, package_name, package_uid, net_usage))
+
+    data_headers = ('Start Time', 'End Time', 'Entry ID', 'Package Name', 'Package UID', 'Network Usage')
+
+    return data_headers, data_list, files_found[0]
+
+
+@artifact_processor
+def sdhms_temperature(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    query = ('''
+    SELECT
+        timestamp,
+        skin_temp/10.0 [Chassis Temperature],
+        ap_temp/10.0 [Processor Temperature],
+        bat_temp/10.0 [Battery Temperature],
+        usb_temp/10.0 [USB Temperature],
+        chg_temp/10.0 [Charging IC Temperature],
+        pa_temp/10.0 [Cellular Radio Temperature],
+        wifi_temp/10.0 [WiFi Temperature]
+    FROM TEMPERATURE
+    ''')
+
+    data_list = []
+
+    db_records = get_sqlite_db_records(str(files_found[0]), query)
+
+    for row in db_records:
+        timestamp = convert_unix_ts_to_utc(int(row[0])/1000)
+        skin_temp = row[1]
+        ap_temp = row[2]
+        bat_temp = row[3]
+        usb_temp = row[4]
+        chg_temp = row[5]
+        pa_temp = row[6]
+        wifi_temp = row[7]
+
+        data_list.append((timestamp, skin_temp, ap_temp, bat_temp, usb_temp, chg_temp, pa_temp, wifi_temp))
+
+    data_headers = ('Timestamp', 'Chassis Temperature', 'Processor Temperature', 'Battery Temperature', 'USB Temperature', 'Charging IC Temperature', 'Cellular Radio Temperature', 'WiFi Temperature')
+
+    return data_headers, data_list, files_found[0]
+
+
+@artifact_processor
+def sdhms_cpustats(files_found, _report_folder, _seeker, _wrap_text):
+    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
+
+    query = ('''
+    SELECT
+        start_time,
+        end_time,
+        uptime [Uptime],
+        process_name [Process Name],
+        uid [Package ID],
+        pid [Process ID],
+        process_usage [Process Usage]
+    FROM CPUSTAT
+    ''')
+
+    data_list = []
+
+    db_records = get_sqlite_db_records(str(files_found[0]), query)
+
+    for row in db_records:
+        start_time = convert_unix_ts_to_utc(int(row[0])/1000)
+        end_time = convert_unix_ts_to_utc(int(row[1])/1000)
+        uptime = row[2]
+        process_name = row[3]
+        package_id = row[4]
+        process_id = row[5]
+        process_cpu_usage = row[6]
+
+        data_list.append((start_time, end_time, uptime, process_name, package_id, process_id, process_cpu_usage))
+
+    data_headers = ('Start Time', 'End Time', 'Uptime', 'Process Name', 'Package ID', 'Process ID', 'Process CPU Usage')
+
+    return data_headers, data_list, files_found[0]
\ No newline at end of file
diff --git a/scripts/artifacts/SamsungNotes.py b/scripts/artifacts/SamsungNotes.py
new file mode 100644
index 00000000..3f071435
--- /dev/null
+++ b/scripts/artifacts/SamsungNotes.py
@@ -0,0 +1,86 @@
+# Android Samsung Notes App (com.samsung.android.app.notes)
+# Author: Marco Neumann (kalinko@be-binary.de)
+# Tested Version: 4.4.30.91
+
+
+__artifacts_v2__ = {
+
+    "snotes": {
+        "name": "Samsung Notes",
+        "description": "Samsung Notes",
+        "author": "Marco Neumann {kalinko@be-binary.de}",
+        "version": "0.0.1",
+        "creation_date": "2026-01-17",
+        "last_update_date": "2026-01-17",
+        "requirements": "os",
+        "category": "Notes",
+        "notes": "",
+        "output_types": ["standard"],
+        "html_columns": ["Media"],
+        "paths": ('*/com.samsung.android.app.notes/databases/sdoc.db*',
+                  '*/user/*/com.samsung.android.app.notes/SDocData/*/media/*'),
+        "artifact_icon": "edit"
+    }
+
+}
+
+import os
+from scripts.ilapfuncs import artifact_processor, convert_unix_ts_to_utc, get_sqlite_db_records, media_to_html
+
+
+@artifact_processor
+def snotes(files_found, report_folder, _seeker, _wrap_text):
+
+    main_db = ''
+    medias = []
+
+    for file_found in files_found:
+        file_found = str(file_found)
+
+        if file_found.endswith('sdoc.db'):
+            main_db = file_found
+
+        if 'media' in os.path.dirname(file_found):
+            if not file_found.endswith('dat') and not file_found.endswith('spi'):
+                medias.append(file_found)
+
+    query = ('''
+    SELECT
+        sd.createdAt,
+        sd.lastModifiedAt,
+        sd.title,
+        sd.content,
+        sd.isDeleted,
+        sd.recycle_bin_time_moved,
+        sd.firstOpendAt,
+        sd.secondOpenedAt,
+        sd.lastOpenedAt,
+        sd.filePath
+    FROM sdoc sd
+    ''')
+
+    data_list = []
+
+    db_records = get_sqlite_db_records(main_db, query)
+
+    for row in db_records:
+        created = convert_unix_ts_to_utc(int(row[0])/1000)
+        last_modified = convert_unix_ts_to_utc(int(row[1])/1000)
+        title = row[2]
+        # content may be returned as bytes; only decode when necessary
+        content = row[3].decode('utf-8') if isinstance(row[3], bytes) else row[3]
+        is_deleted = row[4]
+        deleted = convert_unix_ts_to_utc(int(row[5])/1000)
+        first_opened = convert_unix_ts_to_utc(int(row[6])/1000)
+        second_opened = convert_unix_ts_to_utc(int(row[7])/1000)
+        last_opened = convert_unix_ts_to_utc(int(row[8])/1000)
+        file_path = row[9]
+
+        media = []
+        for media_path in medias:
+            if file_path and os.path.basename(file_path) in media_path:
+                media.append(media_to_html(os.path.basename(media_path), medias, report_folder))
+
+        data_list.append((created, last_modified, title, content, is_deleted, deleted, first_opened, second_opened, last_opened, media))
+
+    data_headers = ('Creation Time', 'Last Modification Time', 'Title', 'Text Content', 'Deleted?', 'Deletion Time', 'First Opened Time', 'Second Opened Time', 'Last Opened Time', 'Media')
+
+    return data_headers, data_list, main_db