diff --git a/scripts/artifacts/FacebookMessenger.py b/scripts/artifacts/FacebookMessenger.py
index b1b22c58..f64c449d 100755
--- a/scripts/artifacts/FacebookMessenger.py
+++ b/scripts/artifacts/FacebookMessenger.py
@@ -207,8 +207,17 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
         source_file = file_found.replace(seeker.data_folder, '')
         db = open_sqlite_db_readonly(file_found)
         cursor = db.cursor()
+
+        cursor.execute("PRAGMA table_info('messages');")
+        cols = [r[1] for r in cursor.fetchall()]
+        if 'reaction_timestamp' in cols:
+            reaction_ts_sql = 'datetime(message_reactions.reaction_timestamp/1000,\'unixepoch\') as "Message Reaction Timestamp",'
+        else:
+            reaction_ts_sql = "'' as \"Message Reaction Timestamp\","
+
+
         try:
-            cursor.execute('''
+            cursor.execute(f'''
             select
             case messages.timestamp_ms
                 when 0 then ''
@@ -224,7 +233,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
             (select json_extract (messages.shares, '$[0].description')) as ShareDesc,
             (select json_extract (messages.shares, '$[0].href')) as ShareLink,
             message_reactions.reaction as "Message Reaction",
-            datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp",
+            {reaction_ts_sql}
             messages.msg_id
             from messages, threads
             left join message_reactions on message_reactions.msg_id = messages.msg_id
@@ -233,7 +242,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
             ''')
             snippet = 1
         except:
-            cursor.execute('''
+            cursor.execute(f'''
             select
             case messages.timestamp_ms
                 when 0 then ''
@@ -248,7 +257,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
             (select json_extract (messages.shares, '$[0].description')) as ShareDesc,
             (select json_extract (messages.shares, '$[0].href')) as ShareLink,
             message_reactions.reaction as "Message Reaction",
-            datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp",
+            {reaction_ts_sql}
             messages.msg_id
             from messages, threads
             left join message_reactions on message_reactions.msg_id = messages.msg_id
@@ -322,8 +331,26 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
                 timeline(report_folder, tlactivity, data_list, data_headers)
             else:
                 logfunc(f'No Facebook{typeof}- Calls{usernum} - threads_db2 data available')
+
+        cursor.execute("PRAGMA table_info('thread_users');")
+        cols = [r[1] for r in cursor.fetchall()]
+        if 'friendship_status' in cols:
+            friendship_status_sql = """case friendship_status
+                when 0 then "N/A (Self)"
+                when 1 then "Friends"
+                when 2 then "Friend Request Received"
+                when 3 then "Friend Request Sent"
+                when 4 then "Not Friends"
+            end as "Friendship Status","""
+        else:
+            friendship_status_sql = "'' as \"Friendship Status\","
+
+        if 'contact_relationship_status' in cols:
+            contact_relationship_status_sql = 'contact_relationship_status as "Contact Relationship Status"'
+        else:
+            contact_relationship_status_sql = "'' as \"Contact Relationship Status\""
 
-        cursor.execute('''
+        cursor.execute(f'''
         select
         substr(user_key,10),
         first_name,
@@ -338,8 +365,8 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
            when 0 then 'No'
            when 1 then 'Yes'
         end is_friend,
-        friendship_status,
-        contact_relationship_status
+        {friendship_status_sql}
+        {contact_relationship_status_sql}
         from thread_users
         ''')
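The PRAGMA table_info probe used above is a general pattern: read the live column list, then splice either the real expression or a placeholder into the SQL. A minimal standalone sketch follows; it is not part of the patch, and the column_exists helper plus the in-memory table are illustrative only.

import sqlite3

def column_exists(cursor, table, column):
    # PRAGMA table_info returns one row per column; index 1 holds the column name
    cursor.execute(f"PRAGMA table_info('{table}')")
    return column in [row[1] for row in cursor.fetchall()]

db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('create table messages (msg_id text, reaction_timestamp integer)')
if column_exists(cur, 'messages', 'reaction_timestamp'):
    reaction_ts_sql = "datetime(reaction_timestamp/1000,'unixepoch') as \"Message Reaction Timestamp\""
else:
    reaction_ts_sql = "'' as \"Message Reaction Timestamp\""
cur.execute(f"select msg_id, {reaction_ts_sql} from messages")
db.close()

The same probe-then-build approach keeps the query working against both old and new schema versions of the database, which is why the patch repeats it for the thread_users table.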
diff --git a/scripts/artifacts/deviceHealthServices_Battery.py b/scripts/artifacts/deviceHealthServices_Battery.py
index 38f2c8df..0fffd920 100644
--- a/scripts/artifacts/deviceHealthServices_Battery.py
+++ b/scripts/artifacts/deviceHealthServices_Battery.py
@@ -28,21 +28,26 @@
         "artifact_icon": "bluetooth"
     }
 }
-
-import sqlite3
-import textwrap
 import os
-from packaging import version
-from scripts.artifact_report import ArtifactHtmlReport
 from scripts.ilapfuncs import artifact_processor, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
 
 @artifact_processor
-def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
+def Turbo_Battery(files_found, _report_folder, seeker, _wrap_text):
     source_file_turbo = ''
     turbo_db = ''
     data_list = []
+    # try to get a timezone offset from seeker; fall back to UTC (0) if not available
+    try:
+        time_offset = getattr(seeker, 'timezone', None)
+        if time_offset is None:
+            time_offset = getattr(seeker, 'time_zone', 0)
+        if time_offset is None:
+            time_offset = 0
+    except AttributeError:
+        time_offset = 0
+
     for file_found in files_found:
         file_found = str(file_found)
         if file_found.lower().endswith('turbo.db'):
@@ -80,7 +85,11 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
                 if timestamp is None:
                     pass
                 else:
-                    timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset)
+                    try:
+                        timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset)
+                    except (ValueError, TypeError):
+                        # fallback to UTC human timestamp if timezone conversion fails
+                        timestamp = convert_ts_human_to_utc(timestamp)
                 data_list.append((timestamp,row[1],row[2],row[3],row[4],file_found))
 
             db.close()
@@ -90,40 +99,55 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
     return data_headers, data_list, source_file_turbo
 
 @artifact_processor
-def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text):
+def Turbo_Bluetooth(files_found, _report_folder, seeker, _wrap_text):
     source_file_bluetooth = ''
-    turbo_db = ''
+    bluetooth_db = ''
     data_list = []
-    if file_found.lower().endswith('bluetooth.db'):
-        bluetooth_db = str(file_found)
-        source_file_bluetooth = file_found.replace(seeker.directory, '')
+    # attempt to get timezone offset; default to UTC (0)
+    try:
+        time_offset = getattr(seeker, 'timezone', None)
+        if time_offset is None:
+            time_offset = getattr(seeker, 'time_zone', 0)
+        if time_offset is None:
+            time_offset = 0
+    except AttributeError:
+        time_offset = 0
+
+    for file_found in files_found:
+        file_found = str(file_found)
+        if file_found.lower().endswith('bluetooth.db'):
+            bluetooth_db = str(file_found)
+            source_file_bluetooth = file_found.replace(seeker.directory, '')
 
-    db = open_sqlite_db_readonly(bluetooth_db)
-    cursor = db.cursor()
-    cursor.execute('''
-    select
-    datetime(timestamp_millis/1000,'unixepoch'),
-    bd_addr,
-    device_identifier,
-    battery_level,
-    volume_level,
-    time_zone
-    from battery_event
-    join device_address on battery_event.device_idx = device_address.device_idx
-    ''')
+            db = open_sqlite_db_readonly(bluetooth_db)
+            cursor = db.cursor()
+            cursor.execute('''
+            select
+            datetime(timestamp_millis/1000,'unixepoch'),
+            bd_addr,
+            device_identifier,
+            battery_level,
+            volume_level,
+            time_zone
+            from battery_event
+            join device_address on battery_event.device_idx = device_address.device_idx
+            ''')
 
-    all_rows = cursor.fetchall()
-    usageentries = len(all_rows)
-    if usageentries > 0:
-        for row in all_rows:
-            timestamp = row[0]
-            if timestamp is None:
-                pass
-            else:
-                timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset)
-            data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found))
-    db.close()
+            all_rows = cursor.fetchall()
+            usageentries = len(all_rows)
+            if usageentries > 0:
+                for row in all_rows:
+                    timestamp = row[0]
+                    if timestamp is None:
+                        pass
+                    else:
+                        try:
+                            timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset)
+                        except (ValueError, TypeError):
+                            timestamp = convert_ts_human_to_utc(timestamp)
+                    data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found))
+            db.close()
 
     data_headers = (('Timestamp','datetime'),'BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone','Source')
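The seeker attribute probing added to both functions is effectively a chained getattr lookup with a default. A minimal sketch under the assumption that the seeker object may expose timezone, time_zone, or neither; first_attr and FakeSeeker are illustrative names, not ALEAPP APIs.

def first_attr(obj, names, default=0):
    # return the first attribute in `names` that exists and is not None
    for name in names:
        value = getattr(obj, name, None)
        if value is not None:
            return value
    return default

class FakeSeeker:
    time_zone = 'UTC'

time_offset = first_attr(FakeSeeker(), ('timezone', 'time_zone'), default=0)
print(time_offset)  # -> 'UTC'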
diff --git a/scripts/artifacts/googleDuo.py b/scripts/artifacts/googleDuo.py
index 2717ec5f..a34d653c 100755
--- a/scripts/artifacts/googleDuo.py
+++ b/scripts/artifacts/googleDuo.py
@@ -1,13 +1,11 @@
 import os
 import shutil
 import sqlite3
-import textwrap
-import scripts.artifacts.artGlobals
 
 from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
+from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
 
-def get_googleDuo(files_found, report_folder, seeker, wrap_text):
+def get_googleDuo(files_found, report_folder, _seeker, _wrap_text):
     
     for file_found in files_found:
         file_found = str(file_found)
@@ -16,6 +14,8 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
         
         db = open_sqlite_db_readonly(file_found)
         cursor = db.cursor()
+
+        # --- Query 1: Activity History ---
         cursor.execute('''
         select
         datetime(timestamp_usec/1000000, 'unixepoch') as 'Timestamp',
@@ -47,7 +47,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
             report = ArtifactHtmlReport('Google Duo - Call History')
             report.start_artifact_report(report_folder, 'Google Duo - Call History')
             report.add_script()
-            data_headers = ('Timestamp','Local User','Remote User','Contact Name','Activity Type','Call Status','Direction') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
+            data_headers = ('Timestamp','Local User','Remote User','Contact Name','Activity Type','Call Status','Direction')
             data_list = []
             for row in all_rows:
                 data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6]))
@@ -55,14 +55,15 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
             report.write_artifact_data_table(data_headers, data_list, file_found)
             report.end_artifact_report()
             
-            tsvname = f'Google Duo - Call History'
+            tsvname = 'Google Duo - Call History'
             tsv(report_folder, data_headers, data_list, tsvname)
             
-            tlactivity = f'Google Duo - Call History'
+            tlactivity = 'Google Duo - Call History'
             timeline(report_folder, tlactivity, data_list, data_headers)
         else:
            logfunc('No Google Duo - Call History data available')
         
+        # --- Query 2: Contacts ---
         cursor.execute('''
         select
         case system_contact_last_update_millis
         when 0 then ''
@@ -83,7 +84,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
             report = ArtifactHtmlReport('Google Duo - Contacts')
             report.start_artifact_report(report_folder, 'Google Duo - Contacts')
             report.add_script()
-            data_headers = ('Last Updated Timestamp','Contact Name','Contact Info','Contact Label','Contact ID') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
+            data_headers = ('Last Updated Timestamp','Contact Name','Contact Info','Contact Label','Contact ID')
             data_list = []
             for row in all_rows:
                 data_list.append((row[0],row[1],row[2],row[3],row[4]))
@@ -91,15 +92,36 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
             report.write_artifact_data_table(data_headers, data_list, file_found)
             report.end_artifact_report()
             
-            tsvname = f'Google Duo - Contacts'
+            tsvname = 'Google Duo - Contacts'
             tsv(report_folder, data_headers, data_list, tsvname)
             
-            tlactivity = f'Google Duo - Contacts'
+            tlactivity = 'Google Duo - Contacts'
             timeline(report_folder, tlactivity, data_list, data_headers)
         else:
             logfunc('No Google Duo - Contacts data available')
         
-        cursor.execute('''
+        # --- Query 3: Messages / Notes (FIXED) ---
+
+        # Check if 'saved_status' column exists in 'messages' table
+        try:
+            cursor.execute("PRAGMA table_info(messages)")
+            columns = [column[1] for column in cursor.fetchall()]
+            has_saved_status = 'saved_status' in columns
+        except (sqlite3.OperationalError, sqlite3.DatabaseError):
+            has_saved_status = False
+
+        # Define the 'File Saved' column selection based on availability
+        if has_saved_status:
+            saved_status_query = """
+            case saved_status
+            when 0 then ''
+            when 1 then 'Yes'
+            end as 'File Saved'
+            """
+        else:
+            saved_status_query = "'' as 'File Saved'"
+
+        query_messages = f'''
         select
         case sent_timestamp_millis
         when 0 then ''
@@ -118,12 +140,11 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
         content_uri,
         replace(content_uri, rtrim(content_uri, replace(content_uri, '/', '')), '') as 'File Name',
         content_size_bytes,
-        case saved_status
-        when 0 then ''
-        when 1 then 'Yes'
-        end as 'File Saved'
+        {saved_status_query}
         from messages
-        ''')
+        '''
+
+        cursor.execute(query_messages)
         
         all_rows = cursor.fetchall()
         usageentries = len(all_rows)
@@ -137,7 +158,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
                 viewed_ts = row[2]
                 sender_id = row[3]
                 recipient_id = row[4]
-                content_uri = row[5]
+                # row[5] is content_uri, unused in report logic, removed to fix pylint warning
                 content_name = row[6]
                 content_size = row[7]
                 file_saved = row[8]
@@ -149,20 +170,21 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text):
                         data_file_name = os.path.basename(match)
                         thumb = f''
                 
-                data_list.append((row[0],row[1],row[2],row[3],row[4],thumb,row[7],row[8]))
+                # FIX: Using the variables defined above instead of row[] indices
+                data_list.append((sent_ts, received_ts, viewed_ts, sender_id, recipient_id, thumb, content_size, file_saved))
             
             report = ArtifactHtmlReport('Google Duo - Notes')
             report.start_artifact_report(report_folder, 'Google Duo - Notes')
             report.add_script()
-            data_headers = ('Sent Timestamp','Received Timestamp','Viewed Timestamp','Sender','Recipient','Content','Size','File Saved') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
+            data_headers = ('Sent Timestamp','Received Timestamp','Viewed Timestamp','Sender','Recipient','Content','Size','File Saved')
             report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Content'])
             report.end_artifact_report()
             
-            tsvname = f'Google Duo - Notes'
+            tsvname = 'Google Duo - Notes'
             tsv(report_folder, data_headers, data_list, tsvname)
             
-            tlactivity = f'Google Duo - Notes'
+            tlactivity = 'Google Duo - Notes'
             timeline(report_folder, tlactivity, data_list, data_headers)
         else:
             logfunc('No Google Duo - Notes data available')
diff --git a/scripts/artifacts/googleMessages.py b/scripts/artifacts/googleMessages.py
index 38c9eb04..437e9e94 100755
--- a/scripts/artifacts/googleMessages.py
+++ b/scripts/artifacts/googleMessages.py
@@ -4,14 +4,12 @@
 # Version: 0.1
 # Requirements: None
 
-import os
 import sqlite3
-import textwrap
 
 from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
+from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
 
-def get_googleMessages(files_found, report_folder, seeker, wrap_text):
+def get_googleMessages(files_found, report_folder, _seeker, _wrap_text):
     
     for file_found in files_found:
         file_found = str(file_found)
@@ -20,28 +18,52 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text):
         
         db = open_sqlite_db_readonly(file_found)
         cursor = db.cursor()
-        cursor.execute('''
+
+        try:
+            cursor.execute("PRAGMA table_info(parts)")
+            columns = [column[1] for column in cursor.fetchall()]
+        except sqlite3.OperationalError:
+            columns = []
+
+        if 'file_size_bytes' in columns:
+            file_size_query = '''
+            CASE
+                WHEN parts.file_size_bytes=-1 THEN "N/A"
+                ELSE parts.file_size_bytes
+            END'''
+        else:
+            file_size_query = "'N/A'"
+
+        if 'local_cache_path' in columns:
+            local_cache_query = 'parts.local_cache_path'
+        else:
+            local_cache_query = "'N/A'"
+
+        query = f'''
         SELECT
        datetime(parts.timestamp/1000,'unixepoch') AS "Timestamp (UTC)",
        parts.content_type AS "Message Type",
        conversations.name AS "Other Participant/Conversation Name",
        participants.display_destination AS "Message Sender",
        parts.text AS "Message",
-        CASE
-            WHEN parts.file_size_bytes=-1 THEN "N/A"
-            ELSE parts.file_size_bytes
-        END AS "Attachment Byte Size",
-        parts.local_cache_path AS "Attachment Location"
+        {file_size_query} AS "Attachment Byte Size",
+        {local_cache_query} AS "Attachment Location"
        FROM parts
        JOIN messages ON messages._id=parts.message_id
        JOIN participants ON participants._id=messages.sender_id
        JOIN conversations ON conversations._id=parts.conversation_id
        ORDER BY "Timestamp (UTC)" ASC
-        ''')
+        '''
+
+        try:
+            cursor.execute(query)
+            all_rows = cursor.fetchall()
+            usageentries = len(all_rows)
+        except sqlite3.Error as e:
+            logfunc(f'Error executing query in Google Messages: {e}')
+            usageentries = 0
 
-        all_rows = cursor.fetchall()
-        usageentries = len(all_rows)
         if usageentries > 0:
             report = ArtifactHtmlReport('Google Messages')
             report.start_artifact_report(report_folder, 'Google Messages')
@@ -54,10 +76,10 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text):
             report.write_artifact_data_table(data_headers, data_list, file_found)
             report.end_artifact_report()
             
-            tsvname = f'Google Messages'
+            tsvname = 'Google Messages'
             tsv(report_folder, data_headers, data_list, tsvname)
             
-            tlactivity = f'Google Messages'
+            tlactivity = 'Google Messages'
             timeline(report_folder, tlactivity, data_list, data_headers)
         else:
             logfunc('No Google Messages data available')
diff --git a/scripts/artifacts/keepNotes.py b/scripts/artifacts/keepNotes.py
index a50ec7b3..cf48685e 100644
--- a/scripts/artifacts/keepNotes.py
+++ b/scripts/artifacts/keepNotes.py
@@ -28,39 +28,63 @@ def get_keepNotes(files_found, report_folder, seeker, wrap_text):
         if filename.endswith('keep.db'):
             db = open_sqlite_db_readonly(file_found)
             cursor = db.cursor()
-            cursor.execute('''
-            SELECT
-            datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
-            datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
-            datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
-            tree_entity.title AS Title,
-            text_search_note_content_content.c0text AS "Text",
-            tree_entity.last_modifier_email AS "Last Modifier Email"
-            FROM text_search_note_content_content
-            INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id
-            ''')
-            all_rows = cursor.fetchall()
-            usageentries = len(all_rows)
+            cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='text_search_note_content_content'")
+            fts_table = cursor.fetchone()
 
-            if usageentries > 0:
-                data_list = []
-                for row in all_rows:
-                    data_list.append(row)
+            if fts_table:
+                query = '''
+                SELECT
+                datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
+                datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
+                datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
+                tree_entity.title AS Title,
+                text_search_note_content_content.c0text AS "Text",
+                tree_entity.last_modifier_email AS "Last Modifier Email"
+                FROM text_search_note_content_content
+                INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id
+                '''
+            else:
+                query = '''
+                SELECT
+                datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
+                datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
+                datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
+                tree_entity.title AS Title,
+                GROUP_CONCAT(list_item.text, char(10)) AS "Text",
+                tree_entity.last_modifier_email AS "Last Modifier Email"
+                FROM tree_entity
+                LEFT JOIN list_item ON tree_entity._id = list_item.list_parent_id
+                GROUP BY tree_entity._id
+                '''
 
-                report = ArtifactHtmlReport('Google Keep Notes')
-                report.start_artifact_report(report_folder, 'Google Keep Notes')
-                report.add_script()
-                data_headers = ('Time Created', 'Time Last Updated', 'User Edited Timestamp', 'Title', 'Text', 'Last Modifier Email')
-                report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
-                report.end_artifact_report()
+            try:
+                cursor.execute(query)
+                all_rows = cursor.fetchall()
+                usageentries = len(all_rows)
 
-                tsvname = 'Google Keep Notes'
-                tsv(report_folder, data_headers, data_list, tsvname)
+                if usageentries > 0:
+                    data_list = []
+                    for row in all_rows:
+                        data_list.append(row)
 
-                tlactivity = 'Google Keep Notes'
-                timeline(report_folder, tlactivity, data_list, data_headers)
+                    report = ArtifactHtmlReport('Google Keep Notes')
+                    report.start_artifact_report(report_folder, 'Google Keep Notes')
+                    report.add_script()
+                    data_headers = ('Time Created', 'Time Last Updated', 'User Edited Timestamp', 'Title', 'Text', 'Last Modifier Email')
+                    report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
+                    report.end_artifact_report()
 
-            else:
-                logfunc('No Google Keep Notes data available')
+                    tsvname = 'Google Keep Notes'
+                    tsv(report_folder, data_headers, data_list, tsvname)
+
+                    tlactivity = 'Google Keep Notes'
+                    timeline(report_folder, tlactivity, data_list, data_headers)
+                else:
+                    logfunc('No Google Keep Notes data available')
+
+            except Exception as e:
+                logfunc(f'Error reading Google Keep Notes: {e}')
+
+            db.close()
\ No newline at end of file
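The keepNotes change picks its query based on whether the FTS shadow table is present. A standalone sketch of that existence check against sqlite_master follows; the table_exists helper and the toy schema are illustrative and not part of the patch.

import sqlite3

def table_exists(cursor, name):
    # sqlite_master lists every table present in the database file
    cursor.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", (name,))
    return cursor.fetchone() is not None

db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('create table tree_entity (_id integer, title text)')
if table_exists(cur, 'text_search_note_content_content'):
    query = 'select c0text from text_search_note_content_content'
else:
    query = 'select title from tree_entity'
cur.execute(query)
db.close()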
diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py
index 05c90e48..350ecb7e 100644
--- a/scripts/artifacts/notificationHistory.py
+++ b/scripts/artifacts/notificationHistory.py
@@ -1,7 +1,7 @@
 __artifacts_v2__ = {
     "Android Notification History": {
         "name": "Android Notification History",
-        "description": "Get Android notifications' history, policy and settings. This parser is based on a research project",
+        "description": "Get Android notifications' history, policy and settings.",
         "author": "Evangelos Dragonas (@theAtropos4n6)",
         "version": "0.0.1",
         "date": "2024-07-02",
@@ -16,26 +16,28 @@
     }
 }
 
-
 import xml.etree.ElementTree as ET
-from datetime import *
 import os
-import scripts.artifacts.notification_history_pb.notificationhistory_pb2 as notificationhistory_pb2
-
+from scripts.artifacts.notification_history_pb import notificationhistory_pb2
 from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, abxread, checkabx,convert_ts_int_to_utc,convert_utc_human_to_timezone
-
+from scripts.ilapfuncs import logfunc, tsv, timeline, abxread, checkabx, convert_ts_int_to_utc, convert_utc_human_to_timezone
 
-def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
+def get_notificationHistory(files_found, report_folder, _seeker, _wrap_text):
     data_pb_list = []
+    file_directory = None
+
     for file_found in files_found:
         file_found = str(file_found)
         file_name = os.path.basename(file_found)
-        #parsing settings_secure.xml
+
+        # Store the directory of the last processed file for use in the report
+        current_file_directory = os.path.dirname(file_found)
+
+        # parsing settings_secure.xml
         if file_name.endswith('settings_secure.xml'):
             data_list = []
             user = os.path.basename(os.path.dirname(file_found))
-            if (checkabx(file_found)):
+            if checkabx(file_found):
                 multi_root = True
                 tree = abxread(file_found, multi_root)
             else:
@@ -47,28 +49,25 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                         value = setting.attrib.get('value')
                         value = "Enabled" if value == "1" else "Disabled" if value == "0" else "Unknown"
                         data_list.append((value, user))
-                    else:
-                        pass # setting not available
 
             if data_list:
-                description = f'Indicates whether "Notification History" feature is enabled.'
+                description = 'Indicates whether "Notification History" feature is enabled.'
                 report = ArtifactHtmlReport('Android Notification History - Status')
-                report.start_artifact_report(report_folder, 'Status',description)
+                report.start_artifact_report(report_folder, 'Status', description)
                 report.add_script()
                 data_headers = ('Status', 'User')
                 report.write_artifact_data_table(data_headers, data_list, file_found)
                 report.end_artifact_report()
 
-                tsvname = f'Android Notification History - Status'
+                tsvname = 'Android Notification History - Status'
                 tsv(report_folder, data_headers, data_list, tsvname)
             else:
                 logfunc('No Android Notification History - Status data available')
 
-        #parsing notification_policy.xml
-        if file_name.endswith('notification_policy.xml'):
+        elif file_name.endswith('notification_policy.xml'):
             data_list = []
-            if (checkabx(file_found)):
+            if checkabx(file_found):
                 multi_root = False
                 tree = abxread(file_found, multi_root)
             else:
@@ -81,52 +80,67 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                 for notification in elem:
                     if notification.tag == 'notification':
                         notification_ts = int(notification.attrib.get('time'))
-                        snooze_time = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(notification_ts/1000.0)),'UTC')
+                        snooze_time = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(notification_ts/1000.0)), 'UTC')
                         notification_key = notification.attrib.get('key')
                         data_list.append((f'{snooze_time}', notification_key))
-                    else:
-                        pass #no snoozed notifications found
+
             if data_list:
-                description = f'Notifications the user chose to snooze for a specific time interval'
+                description = 'Notifications the user chose to snooze for a specific time interval'
                 report = ArtifactHtmlReport('Android Notification History - Snoozed notifications')
-                report.start_artifact_report(report_folder, 'Snoozed notifications', description) #'Android Notification History - Snoozed notifications')
+                report.start_artifact_report(report_folder, 'Snoozed notifications', description)
                 report.add_script()
                 data_headers = ('Reminder Time', 'Snoozed Notification')
                 report.write_artifact_data_table(data_headers, data_list, file_found)
                 report.end_artifact_report()
 
-                tsvname = f'Android Notification History - Snoozed notifications'
+                tsvname = 'Android Notification History - Snoozed notifications'
                 tsv(report_folder, data_headers, data_list, tsvname)
-
             else:
                 logfunc('No Android Notification History - Snoozed notifications data available')
         else:
-            #iterate through the notification pbs
+            if file_name.endswith('.xml'):
+                continue
+
+            # iterate through the notification pbs
             try:
                 notification_history = notificationhistory_pb2.NotificationHistoryProto()
                 with open(file_found, 'rb') as f:
                     try:
-                        notification_history.ParseFromString(f.read()) #The error 'Wrong wire type in tag. ' likely happens due to the given .proto map file.
-                    except Exception as e:
-                        logfunc(f'Error in the ParseFromString() function. The error message was: {e}')
+                        content = f.read()
+                        if not content:  # Skip if file is empty
+                            continue
+                        notification_history.ParseFromString(content)
+                    except (ValueError, TypeError) as e:
+                        logfunc(f'Error in the ParseFromString() function for {file_name}. The error message was: {e}')
+                        continue
 
-                package_map = {i + 1: pkg for i, pkg in enumerate(notification_history.string_pool.strings)} # one of the protobuf files stores the package name and indexes
+                # FIX: use getattr to avoid a no-member error on string_pool
+                string_pool = getattr(notification_history, 'string_pool', None)
+                if string_pool:
+                    package_map = {i + 1: pkg for i, pkg in enumerate(string_pool.strings)}
+                else:
+                    package_map = {}
 
-                major_version = notification_history.major_version if notification_history.HasField('major_version') else None # notification format version should be 1
-                for notification in notification_history.notification:
-                    package_name = notification.package if notification.package else package_map.get(notification.package_index, "") #retrieves package from the map if not stored locally
+                # FIX: use getattr for major_version
+                major_version = getattr(notification_history, 'major_version', None) if notification_history.HasField('major_version') else None
+
+                # FIX: use getattr for the notification list
+                notifications = getattr(notification_history, 'notification', [])
+
+                for notification in notifications:
+                    package_name = notification.package if notification.package else package_map.get(notification.package_index, "")
 
-                    #this block tries to fetch the value of each field from within the parsed protobuf file e.g. variable user_id -> recovers the user_id field from the pb
                     fields = ['uid', 'user_id', 'package_index', 'channel_name', 'channel_id','channel_id_index', 'channel_name_index', 'conversation_id', 'conversation_id_index']
-                    defaults = {field: 'Error' for field in fields}
+                    # REMOVED: defaults = {field: 'Error' for field in fields} (unused variable)
+
                     values = {}
                     for field in fields:
                         try:
                             values[field] = getattr(notification, field)
                         except AttributeError:
                             values[field] = 'Error'
-                    #extra block that does the same for the notifications with icons
+
                     if notification.HasField('icon'):
                         icon_fields = ['image_type', 'image_bitmap_filename', 'image_resource_id', 'image_resource_id_package','image_data_length', 'image_data_offset', 'image_uri']
                         for icon_field in icon_fields:
@@ -138,7 +152,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                         ]
                         for icon_field in icon_fields:
                             values[icon_field] = None
-                    #here the returned values are assigned to the variables which are reported
+
                     uid = values['uid']
                     user_id = values['user_id']
                     package_index = values['package_index']
@@ -158,25 +172,40 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                     image_data_length = values['image_data_length']
                     image_data_offset = values['image_data_offset']
                     image_uri = values['image_uri']
-                    file_creation = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(file_name)/1000.0),'UTC')
+
+                    try:
+                        file_creation = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(file_name)/1000.0),'UTC')
+                    except ValueError:
+                        file_creation = ''
+
                     data_pb_list.append((f'{posted_time}',title,text,package_name,user_id,uid,package_index,channel_name,channel_name_index,channel_id,channel_id_index,conversation_id,conversation_id_index,major_version,image_type,image_bitmap_filename,image_resource_id,image_resource_id_package,image_data_length,image_data_offset,image_uri,file_name,f'{file_creation}'))
-        except Exception as e:
-            logfunc(f'Error while opening notification pb files. The error message was:" {e}"')
+
+                # Set file_directory once data has been obtained from at least one file
+                file_directory = current_file_directory
+            except (IOError, OSError) as e:
+                logfunc(f'Error reading file {file_found}: {e}')
+
     if len(data_pb_list) > 0:
-        description = f'A history of the notifications that landed on the device during the last 24h'
+        description = 'A history of the notifications that landed on the device during the last 24h'
         report = ArtifactHtmlReport('Android Notification History - Notifications')
-        report.start_artifact_report(report_folder, f'Notifications', description)
+        report.start_artifact_report(report_folder, 'Notifications', description)
         report.add_script()
-        data_headers = ('Posted Time','Title', 'Text','Package Name','User ID','UID','Package Index','Channel Name','Channel Name Index','Channel ID','Channel ID Index','Conversation ID','Conversation ID Index','Major Version','Image Type','Image Bitmap Filename','Image Resource ID','Image Resource ID Package','Image Data Length','Image Data Offset','Image URI','Protobuf File Name','Protobuf File Creation Date')#,'','','','','','','','','','','','','','')
-        file_directory = os.path.dirname(file_found)
-        report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False)
+        data_headers = ('Posted Time','Title', 'Text','Package Name','User ID','UID','Package Index','Channel Name','Channel Name Index','Channel ID','Channel ID Index','Conversation ID','Conversation ID Index','Major Version','Image Type','Image Bitmap Filename','Image Resource ID','Image Resource ID Package','Image Data Length','Image Data Offset','Image URI','Protobuf File Name','Protobuf File Creation Date')
+
+        # Make sure file_directory has a value before it is used
+        if file_directory:
+            report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False)
+        else:
+            # Fallback if file_directory is somehow still None even though data exists (edge case)
+            report.write_artifact_data_table(data_headers, data_pb_list, "", html_escape=False)
+
         report.end_artifact_report()
-        tsvname = f'Android Notification History - Notifications'
+        tsvname = 'Android Notification History - Notifications'
         tsv(report_folder, data_headers, data_pb_list, tsvname)
-        tlactivity = f'Android Notification History - Notifications'
+        tlactivity = 'Android Notification History - Notifications'
         timeline(report_folder, tlactivity, data_pb_list, data_headers)
     else:
-        logfunc(f'No Android Notification History - Notifications available')
\ No newline at end of file
+        logfunc('No Android Notification History - Notifications available')
\ No newline at end of file