From 6c42d4c9aa1efaaca658540239453cbdf06ec079 Mon Sep 17 00:00:00 2001
From: Kaleb-Dimitrio <145956341+Kaleb-Dimitrio@users.noreply.github.com>
Date: Wed, 10 Dec 2025 04:21:05 +0700
Subject: [PATCH] Fix: Handle missing database columns in multiple artifacts

- notificationHistory: Fix unreachable code and initialize package_map from string_pool
- deviceHealthServices_Battery: Replace undefined time_offset with 'UTC'
- keepNotes: Add FTS table validation and fallback to list_item.text
- googleMessages: Add schema validation for optional columns (file_size_bytes, local_cache_path)
- FacebookMessenger: Add schema validation for optional columns (snippet, reactions)

Fixes database compatibility issues across different Android versions.
---
 scripts/artifacts/FacebookMessenger.py         | 62 +++++++++++++++----
 .../artifacts/deviceHealthServices_Battery.py  |  6 +-
 scripts/artifacts/googleMessages.py            | 30 ++++++---
 scripts/artifacts/keepNotes.py                 | 49 +++++++++++----
 scripts/artifacts/notificationHistory.py       | 26 ++++++--
 5 files changed, 132 insertions(+), 41 deletions(-)

diff --git a/scripts/artifacts/FacebookMessenger.py b/scripts/artifacts/FacebookMessenger.py
index b1b22c58..4d54ff45 100755
--- a/scripts/artifacts/FacebookMessenger.py
+++ b/scripts/artifacts/FacebookMessenger.py
@@ -207,8 +207,28 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
         source_file = file_found.replace(seeker.data_folder, '')
         db = open_sqlite_db_readonly(file_found)
         cursor = db.cursor()
-        try:
-            cursor.execute('''
+        # Detect schema variations for messages and message_reactions tables
+        cursor.execute("PRAGMA table_info(messages)")
+        msg_cols = [r[1] for r in cursor.fetchall()]
+        has_snippet = 'snippet' in msg_cols
+
+        cursor.execute("PRAGMA table_info(message_reactions)")
+        mr_cols = [r[1] for r in cursor.fetchall()]
+
+        if 'reaction_timestamp' in mr_cols:
+            reaction_ts_expr = "datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as \"Message Reaction Timestamp\""
+        elif 'reaction_timestamp_ms' in mr_cols:
+            reaction_ts_expr = "datetime(message_reactions.reaction_timestamp_ms/1000,'unixepoch') as \"Message Reaction Timestamp\""
+        elif 'reaction_creation_timestamp_ms' in mr_cols:
+            reaction_ts_expr = "datetime(message_reactions.reaction_creation_timestamp_ms/1000,'unixepoch') as \"Message Reaction Timestamp\""
+        elif 'reaction_creation_time_ms' in mr_cols:
+            reaction_ts_expr = "datetime(message_reactions.reaction_creation_time_ms/1000,'unixepoch') as \"Message Reaction Timestamp\""
+        else:
+            reaction_ts_expr = "'' as \"Message Reaction Timestamp\""
+
+        # Build the appropriate SELECT depending on whether messages.snippet exists
+        if has_snippet:
+            select_sql = f'''
             select
             case messages.timestamp_ms
             when 0 then ''
@@ -224,16 +244,16 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
             (select json_extract (messages.shares, '$[0].description')) as ShareDesc,
             (select json_extract (messages.shares, '$[0].href')) as ShareLink,
             message_reactions.reaction as "Message Reaction",
-            datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp",
+            {reaction_ts_expr},
             messages.msg_id
             from messages, threads
             left join message_reactions on message_reactions.msg_id = messages.msg_id
             where messages.thread_key=threads.thread_key and generic_admin_message_extensible_data IS NULL and msg_type != -1
             order by messages.thread_key, datestamp;
-            ''')
+            '''
             snippet = 1
-        except:
-            cursor.execute('''
+        else:
+            select_sql = f'''
             select
             case messages.timestamp_ms
             when 0 then ''
@@ -248,13 +268,15 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
             (select json_extract (messages.shares, '$[0].description')) as ShareDesc,
             (select json_extract (messages.shares, '$[0].href')) as ShareLink,
             message_reactions.reaction as "Message Reaction",
-            datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp",
+            {reaction_ts_expr},
             messages.msg_id
             from messages, threads
             left join message_reactions on message_reactions.msg_id = messages.msg_id
             where messages.thread_key=threads.thread_key and generic_admin_message_extensible_data IS NULL and msg_type != -1
             order by messages.thread_key, datestamp;
-            ''')
+            '''
+
+        cursor.execute(select_sql)
 
         all_rows = cursor.fetchall()
         usageentries = len(all_rows)
@@ -323,7 +345,21 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
         else:
             logfunc(f'No Facebook{typeof}- Calls{usernum} - threads_db2 data available')
 
-        cursor.execute('''
+        # Check thread_users schema for optional columns
+        cursor.execute("PRAGMA table_info(thread_users)")
+        tu_cols = [r[1] for r in cursor.fetchall()]
+
+        if 'friendship_status' in tu_cols:
+            friendship_expr = 'friendship_status'
+        else:
+            friendship_expr = "'' as friendship_status"
+
+        if 'contact_relationship_status' in tu_cols:
+            contact_rel_expr = 'contact_relationship_status'
+        else:
+            contact_rel_expr = "'' as contact_relationship_status"
+
+        select_thread_users = f'''
         select
         substr(user_key,10),
         first_name,
@@ -338,10 +374,12 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text):
         when 0 then 'No'
         when 1 then 'Yes'
         end is_friend,
-        friendship_status,
-        contact_relationship_status
+        {friendship_expr},
+        {contact_rel_expr}
        from thread_users
-        ''')
+        '''
+
+        cursor.execute(select_thread_users)
 
         all_rows = cursor.fetchall()
         usageentries = len(all_rows)
diff --git a/scripts/artifacts/deviceHealthServices_Battery.py b/scripts/artifacts/deviceHealthServices_Battery.py
index 38f2c8df..39a5bfe4 100644
--- a/scripts/artifacts/deviceHealthServices_Battery.py
+++ b/scripts/artifacts/deviceHealthServices_Battery.py
@@ -80,7 +80,7 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
                 if timestamp is None:
                     pass
                 else:
-                    timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset)
+                    timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),'UTC')
                 data_list.append((timestamp,row[1],row[2],row[3],row[4],file_found))
 
         db.close()
@@ -121,9 +121,9 @@ def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text):
                 if timestamp is None:
                     pass
                 else:
-                    timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset)
+                    timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),'UTC')
                 data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found))
-            db.close()
+        db.close()
 
     data_headers = (('Timestamp','datetime'),'BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone','Source')
 
diff --git a/scripts/artifacts/googleMessages.py b/scripts/artifacts/googleMessages.py
index 38c9eb04..ce724b0d 100755
--- a/scripts/artifacts/googleMessages.py
+++ b/scripts/artifacts/googleMessages.py
@@ -20,26 +20,40 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text):
 
         db = open_sqlite_db_readonly(file_found)
         cursor = db.cursor()
-        cursor.execute('''
+
+        # Check which optional columns exist
+        cursor.execute("PRAGMA table_info(parts)")
+        columns = [row[1] for row in cursor.fetchall()]
+        has_file_size = 'file_size_bytes' in columns
+        has_cache_path = 'local_cache_path' in columns
+
+        # Build SELECT clause based on available columns
+        file_size_select = '''CASE
+            WHEN parts.file_size_bytes=-1 THEN "N/A"
+            ELSE parts.file_size_bytes
+        END''' if has_file_size else '"N/A"'
+
+        cache_path_select = 'parts.local_cache_path' if has_cache_path else '"N/A"'
+
+        # Build complete query
+        query = f'''
         SELECT
         datetime(parts.timestamp/1000,'unixepoch') AS "Timestamp (UTC)",
         parts.content_type AS "Message Type",
        conversations.name AS "Other Participant/Conversation Name",
         participants.display_destination AS "Message Sender",
         parts.text AS "Message",
-        CASE
-            WHEN parts.file_size_bytes=-1 THEN "N/A"
-            ELSE parts.file_size_bytes
-        END AS "Attachment Byte Size",
-        parts.local_cache_path AS "Attachment Location"
+        {file_size_select} AS "Attachment Byte Size",
+        {cache_path_select} AS "Attachment Location"
         FROM parts
         JOIN messages ON messages._id=parts.message_id
         JOIN participants ON participants._id=messages.sender_id
         JOIN conversations ON conversations._id=parts.conversation_id
         ORDER BY "Timestamp (UTC)" ASC
-        ''')
-
+        '''
+
+        cursor.execute(query)
 
         all_rows = cursor.fetchall()
         usageentries = len(all_rows)
         if usageentries > 0:
diff --git a/scripts/artifacts/keepNotes.py b/scripts/artifacts/keepNotes.py
index a50ec7b3..b9d15269 100644
--- a/scripts/artifacts/keepNotes.py
+++ b/scripts/artifacts/keepNotes.py
@@ -28,18 +28,43 @@ def get_keepNotes(files_found, report_folder, seeker, wrap_text):
         if filename.endswith('keep.db'):
             db = open_sqlite_db_readonly(file_found)
             cursor = db.cursor()
-            cursor.execute('''
-            SELECT
-            datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
-            datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
-            datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
-            tree_entity.title AS Title,
-            text_search_note_content_content.c0text AS "Text",
-            tree_entity.last_modifier_email AS "Last Modifier Email"
-            FROM text_search_note_content_content
-            INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id
-            ''')
-
+
+            cursor.execute("""
+                SELECT name FROM sqlite_master
+                WHERE type='table' AND name LIKE '%text_search_note_content%'
+            """)
+            fts_table = cursor.fetchone()
+
+            if fts_table:
+                query = '''
+                SELECT
+                datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
+                datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
+                datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
+                tree_entity.title AS Title,
+                text_search_note_content_content.c0text AS "Text",
+                tree_entity.last_modifier_email AS "Last Modifier Email"
+                FROM tree_entity
+                LEFT JOIN text_search_note_content_content ON text_search_note_content_content.docid = tree_entity._id
+                WHERE tree_entity.title IS NOT NULL OR text_search_note_content_content.c0text IS NOT NULL
+                ORDER BY tree_entity.time_created DESC
+                '''
+            else:
+                query = '''
+                SELECT
+                datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created",
+                datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated",
+                datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp",
+                tree_entity.title AS Title,
+                list_item.text AS "Text",
+                tree_entity.last_modifier_email AS "Last Modifier Email"
+                FROM tree_entity
+                LEFT JOIN list_item ON tree_entity._id = list_item._id
+                WHERE tree_entity.title IS NOT NULL OR list_item.text IS NOT NULL
+                ORDER BY tree_entity.time_created DESC
+                '''
+
+            cursor.execute(query)
             all_rows = cursor.fetchall()
             usageentries = len(all_rows)
 
diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py
index 05c90e48..c34d77d6 100644
--- a/scripts/artifacts/notificationHistory.py
+++ b/scripts/artifacts/notificationHistory.py
@@ -104,16 +104,30 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
         else:
             #iterate through the notification pbs
             try:
-                notification_history = notificationhistory_pb2.NotificationHistoryProto()
-                with open(file_found, 'rb') as f:
+                with open(file_found, "rb") as f:
+                    data = f.read()
+                    #Skip files that are not real NotificationHistory Protobufs
+                    if len(data) < 32:
+                        logfunc(f"Skipped, too small to be protobuf: {file_found}")
+                        continue
+                    if b"string pool" not in data:
+                        logfunc(f"Skipped, not a NotificationHistory protobuf: {file_found}")
+                        continue
+
+                    notification_history = notificationhistory_pb2.NotificationHistoryProto()
                     try:
-                        notification_history.ParseFromString(f.read()) #The error 'Wrong wire type in tag. ' likely happens due to the given .proto map file.
+                        notification_history.ParseFromString(data)
                     except Exception as e:
-                        logfunc(f'Error in the ParseFromString() function. The error message was: {e}')
-
-                    package_map = {i + 1: pkg for i, pkg in enumerate(notification_history.string_pool.strings)} # one of the protobuf files stores the package name and indexes
+                        logfunc(f"Skipped, invalid protobuf format: {file_found} ({e})")
+                        continue
                     major_version = notification_history.major_version if notification_history.HasField('major_version') else None # notification format version should be 1
+
+                    package_map = {}
+                    if notification_history.HasField('string_pool'):
+                        for i, pkg in enumerate(notification_history.string_pool.strings):
+                            package_map[i + 1] = pkg # package_index is 1-based into the string pool, matching the original mapping
+
                     for notification in notification_history.notification:
                         package_name = notification.package if notification.package else package_map.get(notification.package_index, "") #retrieves package from the map if not stored locally
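
Note on the repeated column check: the same PRAGMA table_info pattern now appears in both FacebookMessenger.py and googleMessages.py. Below is a minimal sketch of how the check could be factored into a shared helper. The helper names (table_columns, pick_column_expr) and the in-memory database in the usage example are illustrative only, they are not part of this patch, and the sketch uses the standard library sqlite3 module directly rather than open_sqlite_db_readonly so it is self-contained.

    import sqlite3

    def table_columns(cursor, table):
        # Return the set of column names for a table (empty if the table is missing).
        cursor.execute(f"PRAGMA table_info({table})")
        return {row[1] for row in cursor.fetchall()}

    def pick_column_expr(columns, wanted, fallback="''"):
        # Use the real column when present, otherwise a literal placeholder aliased to the same name.
        return wanted if wanted in columns else f"{fallback} as {wanted}"

    # Usage example against a throwaway in-memory schema (illustrative only)
    conn = sqlite3.connect(':memory:')
    cur = conn.cursor()
    cur.execute('CREATE TABLE thread_users (user_key TEXT, first_name TEXT)')  # no friendship_status column
    cols = table_columns(cur, 'thread_users')
    friendship_expr = pick_column_expr(cols, 'friendship_status')
    cur.execute(f"SELECT user_key, {friendship_expr} FROM thread_users")

Aliasing the fallback literal to the original column name keeps the result layout identical whichever schema version the device has, so the downstream report headers do not need to change.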