From 0c517ef9bc60e01ba743d3d9891ba635f426d4ec Mon Sep 17 00:00:00 2001 From: Ariandra Dwiratama Date: Sun, 7 Dec 2025 00:22:54 +0700 Subject: [PATCH 1/5] Enhance database queries and error handling in various artifact scripts for improved data extraction and robustness --- scripts/artifacts/FacebookMessenger.py | 41 +++++++-- .../artifacts/deviceHealthServices_Battery.py | 87 ++++++++++++------ scripts/artifacts/googleDuo.py | 41 +++++++-- scripts/artifacts/googleMessages.py | 48 ++++++++-- scripts/artifacts/keepNotes.py | 88 +++++++++++++------ scripts/artifacts/notificationHistory.py | 27 ++++-- 6 files changed, 243 insertions(+), 89 deletions(-) diff --git a/scripts/artifacts/FacebookMessenger.py b/scripts/artifacts/FacebookMessenger.py index b1b22c58..f64c449d 100755 --- a/scripts/artifacts/FacebookMessenger.py +++ b/scripts/artifacts/FacebookMessenger.py @@ -207,8 +207,17 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): source_file = file_found.replace(seeker.data_folder, '') db = open_sqlite_db_readonly(file_found) cursor = db.cursor() + + cursor.execute("PRAGMA table_info('messages');") + cols = [r[1] for r in cursor.fetchall()] + if 'reaction_timestamp' in cols: + reaction_ts_sql = 'datetime(message_reactions.reaction_timestamp/1000,\'unixepoch\') as "Message Reaction Timestamp",' + else: + reaction_ts_sql = "'' as \"Message Reaction Timestamp\"," + + try: - cursor.execute(''' + cursor.execute(f''' select case messages.timestamp_ms when 0 then '' @@ -224,7 +233,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): (select json_extract (messages.shares, '$[0].description')) as ShareDesc, (select json_extract (messages.shares, '$[0].href')) as ShareLink, message_reactions.reaction as "Message Reaction", - datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp", + {reaction_ts_sql} messages.msg_id from messages, threads left join message_reactions on message_reactions.msg_id = messages.msg_id @@ -233,7 +242,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): ''') snippet = 1 except: - cursor.execute(''' + cursor.execute(f''' select case messages.timestamp_ms when 0 then '' @@ -248,7 +257,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): (select json_extract (messages.shares, '$[0].description')) as ShareDesc, (select json_extract (messages.shares, '$[0].href')) as ShareLink, message_reactions.reaction as "Message Reaction", - datetime(message_reactions.reaction_timestamp/1000,'unixepoch') as "Message Reaction Timestamp", + {reaction_ts_sql} messages.msg_id from messages, threads left join message_reactions on message_reactions.msg_id = messages.msg_id @@ -322,8 +331,26 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc(f'No Facebook{typeof}- Calls{usernum} - threads_db2 data available') + + cursor.execute("PRAGMA table_info('thread_users');") + cols = [r[1] for r in cursor.fetchall()] + if 'friendship_status' in cols: + friendship_status_sql = """case friendship_status + when 0 then "N/A (Self)" + when 1 then "Friends" + when 2 then "Friend Request Received" + when 3 then "Friend Request Sent" + when 4 then "Not Friends" + end as "Friendship Status",""" + else: + friendship_status_sql = "'' as \"Friendship Status\"," + + if 'contact_relationship_status' in cols: + contact_relationship_status_sql = 'contact_relationship_status as "Contact 
Relationship Status"' + else: + contact_relationship_status_sql = "'' as \"Contact Relationship Status\"" - cursor.execute(''' + cursor.execute(f''' select substr(user_key,10), first_name, @@ -338,8 +365,8 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): when 0 then 'No' when 1 then 'Yes' end is_friend, - friendship_status, - contact_relationship_status + {friendship_status_sql} + {contact_relationship_status_sql} from thread_users ''') diff --git a/scripts/artifacts/deviceHealthServices_Battery.py b/scripts/artifacts/deviceHealthServices_Battery.py index 38f2c8df..2d578c0c 100644 --- a/scripts/artifacts/deviceHealthServices_Battery.py +++ b/scripts/artifacts/deviceHealthServices_Battery.py @@ -43,6 +43,16 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text): turbo_db = '' data_list = [] + # try to get a timezone offset from seeker; fall back to UTC (0) if not available + try: + time_offset = getattr(seeker, 'timezone', None) + if time_offset is None: + time_offset = getattr(seeker, 'time_zone', 0) + if time_offset is None: + time_offset = 0 + except Exception: + time_offset = 0 + for file_found in files_found: file_found = str(file_found) if file_found.lower().endswith('turbo.db'): @@ -80,7 +90,11 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text): if timestamp is None: pass else: - timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset) + try: + timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset) + except Exception: + # fallback to UTC human timestamp if timezone conversion fails + timestamp = convert_ts_human_to_utc(timestamp) data_list.append((timestamp,row[1],row[2],row[3],row[4],file_found)) db.close() @@ -92,38 +106,53 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text): @artifact_processor def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text): source_file_bluetooth = '' - turbo_db = '' + bluetooth_db = '' data_list = [] - if file_found.lower().endswith('bluetooth.db'): - bluetooth_db = str(file_found) - source_file_bluetooth = file_found.replace(seeker.directory, '') + # attempt to get timezone offset; default to UTC (0) + try: + time_offset = getattr(seeker, 'timezone', None) + if time_offset is None: + time_offset = getattr(seeker, 'time_zone', 0) + if time_offset is None: + time_offset = 0 + except Exception: + time_offset = 0 + + for file_found in files_found: + file_found = str(file_found) + if file_found.lower().endswith('bluetooth.db'): + bluetooth_db = str(file_found) + source_file_bluetooth = file_found.replace(seeker.directory, '') - db = open_sqlite_db_readonly(bluetooth_db) - cursor = db.cursor() - cursor.execute(''' - select - datetime(timestamp_millis/1000,'unixepoch'), - bd_addr, - device_identifier, - battery_level, - volume_level, - time_zone - from battery_event - join device_address on battery_event.device_idx = device_address.device_idx - ''') + db = open_sqlite_db_readonly(bluetooth_db) + cursor = db.cursor() + cursor.execute(''' + select + datetime(timestamp_millis/1000,'unixepoch'), + bd_addr, + device_identifier, + battery_level, + volume_level, + time_zone + from battery_event + join device_address on battery_event.device_idx = device_address.device_idx + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - for row in all_rows: - timestamp = row[0] - if timestamp is None: - pass - else: - timestamp = 
convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp),time_offset) - data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found)) - db.close() + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + timestamp = row[0] + if timestamp is None: + pass + else: + try: + timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset) + except Exception: + timestamp = convert_ts_human_to_utc(timestamp) + data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found)) + db.close() data_headers = (('Timestamp','datetime'),'BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone','Source') diff --git a/scripts/artifacts/googleDuo.py b/scripts/artifacts/googleDuo.py index 2717ec5f..20250f96 100755 --- a/scripts/artifacts/googleDuo.py +++ b/scripts/artifacts/googleDuo.py @@ -16,6 +16,8 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): db = open_sqlite_db_readonly(file_found) cursor = db.cursor() + + # --- Query 1: Activity History --- cursor.execute(''' select datetime(timestamp_usec/1000000, 'unixepoch') as 'Timestamp', @@ -47,7 +49,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Google Duo - Call History') report.start_artifact_report(report_folder, 'Google Duo - Call History') report.add_script() - data_headers = ('Timestamp','Local User','Remote User','Contact Name','Activity Type','Call Status','Direction') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_headers = ('Timestamp','Local User','Remote User','Contact Name','Activity Type','Call Status','Direction') data_list = [] for row in all_rows: data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6])) @@ -63,6 +65,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): else: logfunc('No Google Duo - Call History data available') + # --- Query 2: Contacts --- cursor.execute(''' select case system_contact_last_update_millis @@ -83,7 +86,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Google Duo - Contacts') report.start_artifact_report(report_folder, 'Google Duo - Contacts') report.add_script() - data_headers = ('Last Updated Timestamp','Contact Name','Contact Info','Contact Label','Contact ID') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_headers = ('Last Updated Timestamp','Contact Name','Contact Info','Contact Label','Contact ID') data_list = [] for row in all_rows: data_list.append((row[0],row[1],row[2],row[3],row[4])) @@ -99,7 +102,28 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): else: logfunc('No Google Duo - Contacts data available') - cursor.execute(''' + # --- Query 3: Messages / Notes (FIXED) --- + + # Check if 'saved_status' column exists in 'messages' table + try: + cursor.execute("PRAGMA table_info(messages)") + columns = [column[1] for column in cursor.fetchall()] + has_saved_status = 'saved_status' in columns + except Exception: + has_saved_status = False + + # Define the 'File Saved' column selection based on availability + if has_saved_status: + saved_status_query = """ + case saved_status + when 0 then '' + when 1 then 'Yes' + end as 'File Saved' + """ + else: + saved_status_query = "'' as 'File Saved'" + + query_messages = f''' select case sent_timestamp_millis when 0 then '' @@ -118,12 +142,11 @@ def 
get_googleDuo(files_found, report_folder, seeker, wrap_text): content_uri, replace(content_uri, rtrim(content_uri, replace(content_uri, '/', '')), '') as 'File Name', content_size_bytes, - case saved_status - when 0 then '' - when 1 then 'Yes' - end as 'File Saved' + {saved_status_query} from messages - ''') + ''' + + cursor.execute(query_messages) all_rows = cursor.fetchall() usageentries = len(all_rows) @@ -154,7 +177,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Google Duo - Notes') report.start_artifact_report(report_folder, 'Google Duo - Notes') report.add_script() - data_headers = ('Sent Timestamp','Received Timestamp','Viewed Timestamp','Sender','Recipient','Content','Size','File Saved') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_headers = ('Sent Timestamp','Received Timestamp','Viewed Timestamp','Sender','Recipient','Content','Size','File Saved') report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Content']) report.end_artifact_report() diff --git a/scripts/artifacts/googleMessages.py b/scripts/artifacts/googleMessages.py index 38c9eb04..eaffd212 100755 --- a/scripts/artifacts/googleMessages.py +++ b/scripts/artifacts/googleMessages.py @@ -20,28 +20,58 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): db = open_sqlite_db_readonly(file_found) cursor = db.cursor() - cursor.execute(''' + + # --- VALIDASI KOLOM DINAMIS --- + # Ambil daftar semua kolom yang ada di tabel 'parts' + try: + cursor.execute("PRAGMA table_info(parts)") + columns = [column[1] for column in cursor.fetchall()] + except Exception: + columns = [] + + # 1. Cek keberadaan kolom 'file_size_bytes' + if 'file_size_bytes' in columns: + file_size_query = ''' + CASE + WHEN parts.file_size_bytes=-1 THEN "N/A" + ELSE parts.file_size_bytes + END''' + else: + file_size_query = "'N/A'" + + # 2. 
Cek keberadaan kolom 'local_cache_path' (FIX UNTUK ERROR BARU) + if 'local_cache_path' in columns: + local_cache_query = 'parts.local_cache_path' + else: + local_cache_query = "'N/A'" + + # --- KONSTRUKSI QUERY --- + # Masukkan logika query dinamis ke dalam f-string + query = f''' SELECT datetime(parts.timestamp/1000,'unixepoch') AS "Timestamp (UTC)", parts.content_type AS "Message Type", conversations.name AS "Other Participant/Conversation Name", participants.display_destination AS "Message Sender", parts.text AS "Message", - CASE - WHEN parts.file_size_bytes=-1 THEN "N/A" - ELSE parts.file_size_bytes - END AS "Attachment Byte Size", - parts.local_cache_path AS "Attachment Location" + {file_size_query} AS "Attachment Byte Size", + {local_cache_query} AS "Attachment Location" FROM parts JOIN messages ON messages._id=parts.message_id JOIN participants ON participants._id=messages.sender_id JOIN conversations ON conversations._id=parts.conversation_id ORDER BY "Timestamp (UTC)" ASC - ''') + ''' + + try: + cursor.execute(query) + all_rows = cursor.fetchall() + usageentries = len(all_rows) + except Exception as e: + logfunc(f'Error executing query in Google Messages: {e}') + usageentries = 0 - all_rows = cursor.fetchall() - usageentries = len(all_rows) if usageentries > 0: report = ArtifactHtmlReport('Google Messages') report.start_artifact_report(report_folder, 'Google Messages') diff --git a/scripts/artifacts/keepNotes.py b/scripts/artifacts/keepNotes.py index a50ec7b3..73d8a19c 100644 --- a/scripts/artifacts/keepNotes.py +++ b/scripts/artifacts/keepNotes.py @@ -28,39 +28,69 @@ def get_keepNotes(files_found, report_folder, seeker, wrap_text): if filename.endswith('keep.db'): db = open_sqlite_db_readonly(file_found) cursor = db.cursor() - cursor.execute(''' - SELECT - datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created", - datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated", - datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp", - tree_entity.title AS Title, - text_search_note_content_content.c0text AS "Text", - tree_entity.last_modifier_email AS "Last Modifier Email" - FROM text_search_note_content_content - INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id - ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) + # --- Cek Keberadaan Tabel --- + # Kita cek apakah tabel FTS (text_search...) ada. Jika tidak, kita gunakan tabel list_item. 
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='text_search_note_content_content'") + fts_table = cursor.fetchone() - if usageentries > 0: - data_list = [] - for row in all_rows: - data_list.append(row) + # --- Konstruksi Query --- + if fts_table: + # Query Asli (Jika tabel FTS ada) + query = ''' + SELECT + datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created", + datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated", + datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp", + tree_entity.title AS Title, + text_search_note_content_content.c0text AS "Text", + tree_entity.last_modifier_email AS "Last Modifier Email" + FROM text_search_note_content_content + INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id + ''' + else: + # Query Alternatif (Menggunakan tabel list_item) + # Menggunakan GROUP_CONCAT untuk menggabungkan teks jika berupa checklist atau multiline + query = ''' + SELECT + datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created", + datetime(tree_entity.time_last_updated/1000, 'unixepoch') AS "Time Last Updated", + datetime(tree_entity.user_edited_timestamp/1000, 'unixepoch') AS "User Edited Timestamp", + tree_entity.title AS Title, + GROUP_CONCAT(list_item.text, char(10)) AS "Text", + tree_entity.last_modifier_email AS "Last Modifier Email" + FROM tree_entity + LEFT JOIN list_item ON tree_entity._id = list_item.list_parent_id + GROUP BY tree_entity._id + ''' - report = ArtifactHtmlReport('Google Keep Notes') - report.start_artifact_report(report_folder, 'Google Keep Notes') - report.add_script() - data_headers = ('Time Created', 'Time Last Updated', 'User Edited Timestamp', 'Title', 'Text', 'Last Modifier Email') - report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False) - report.end_artifact_report() + try: + cursor.execute(query) + all_rows = cursor.fetchall() + usageentries = len(all_rows) - tsvname = 'Google Keep Notes' - tsv(report_folder, data_headers, data_list, tsvname) + if usageentries > 0: + data_list = [] + for row in all_rows: + data_list.append(row) - tlactivity = 'Google Keep Notes' - timeline(report_folder, tlactivity, data_list, data_headers) + report = ArtifactHtmlReport('Google Keep Notes') + report.start_artifact_report(report_folder, 'Google Keep Notes') + report.add_script() + data_headers = ('Time Created', 'Time Last Updated', 'User Edited Timestamp', 'Title', 'Text', 'Last Modifier Email') + report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False) + report.end_artifact_report() - else: - logfunc('No Google Keep Notes data available') + tsvname = 'Google Keep Notes' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = 'Google Keep Notes' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Google Keep Notes data available') + + except Exception as e: + logfunc(f'Error reading Google Keep Notes: {e}') + + db.close() \ No newline at end of file diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py index 05c90e48..1a232000 100644 --- a/scripts/artifacts/notificationHistory.py +++ b/scripts/artifacts/notificationHistory.py @@ -16,7 +16,6 @@ } } - import xml.etree.ElementTree as ET from datetime import * import os @@ -31,6 +30,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): for file_found in files_found: file_found = 
str(file_found) file_name = os.path.basename(file_found) + #parsing settings_secure.xml if file_name.endswith('settings_secure.xml'): data_list = [] @@ -65,8 +65,8 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): else: logfunc('No Android Notification History - Status data available') - #parsing notification_policy.xml - if file_name.endswith('notification_policy.xml'): + # PERBAIKAN 1: Gunakan elif agar logika tidak bocor ke else saat file adalah settings_secure.xml + elif file_name.endswith('notification_policy.xml'): data_list = [] if (checkabx(file_found)): multi_root = False @@ -102,14 +102,23 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): logfunc('No Android Notification History - Snoozed notifications data available') else: + # PERBAIKAN 2: Tambahkan pengecekan keamanan. Jangan parse file XML sebagai Protobuf. + if file_name.endswith('.xml'): + continue + #iterate through the notification pbs try: notification_history = notificationhistory_pb2.NotificationHistoryProto() with open(file_found, 'rb') as f: try: - notification_history.ParseFromString(f.read()) #The error 'Wrong wire type in tag. ' likely happens due to the given .proto map file. + content = f.read() + if not content: # Skip jika file kosong + continue + notification_history.ParseFromString(content) except Exception as e: - logfunc(f'Error in the ParseFromString() function. The error message was: {e}') + # Log error tapi jangan hentikan proses keseluruhan, skip file ini saja + logfunc(f'Error in the ParseFromString() function for {file_name}. The error message was: {e}') + continue package_map = {i + 1: pkg for i, pkg in enumerate(notification_history.string_pool.strings)} # one of the protobuf files stores the package name and indexes @@ -158,7 +167,13 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): image_data_length = values['image_data_length'] image_data_offset = values['image_data_offset'] image_uri = values['image_uri'] - file_creation = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(file_name)/1000.0),'UTC') + + # Handle potensi error konversi nama file ke int (jika nama file bukan angka) + try: + file_creation = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(file_name)/1000.0),'UTC') + except ValueError: + file_creation = '' + data_pb_list.append((f'{posted_time}',title,text,package_name,user_id,uid,package_index,channel_name,channel_name_index,channel_id,channel_id_index,conversation_id,conversation_id_index,major_version,image_type,image_bitmap_filename,image_resource_id,image_resource_id_package,image_data_length,image_data_offset,image_uri,file_name,f'{file_creation}')) except Exception as e: logfunc(f'Error while opening notification pb files. 
The error message was:" {e}"') From b2eb7c40dbba18f20ecbb0e54f698a91a2b9add3 Mon Sep 17 00:00:00 2001 From: Ariandra Dwiratama Date: Wed, 10 Dec 2025 01:15:06 +0700 Subject: [PATCH 2/5] Refactor query construction and error handling in artifact scripts for improved clarity and robustness --- scripts/artifacts/googleMessages.py | 6 ------ scripts/artifacts/keepNotes.py | 6 ------ scripts/artifacts/notificationHistory.py | 6 +----- 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/scripts/artifacts/googleMessages.py b/scripts/artifacts/googleMessages.py index eaffd212..72a5d495 100755 --- a/scripts/artifacts/googleMessages.py +++ b/scripts/artifacts/googleMessages.py @@ -21,15 +21,12 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): db = open_sqlite_db_readonly(file_found) cursor = db.cursor() - # --- VALIDASI KOLOM DINAMIS --- - # Ambil daftar semua kolom yang ada di tabel 'parts' try: cursor.execute("PRAGMA table_info(parts)") columns = [column[1] for column in cursor.fetchall()] except Exception: columns = [] - # 1. Cek keberadaan kolom 'file_size_bytes' if 'file_size_bytes' in columns: file_size_query = ''' CASE @@ -39,14 +36,11 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): else: file_size_query = "'N/A'" - # 2. Cek keberadaan kolom 'local_cache_path' (FIX UNTUK ERROR BARU) if 'local_cache_path' in columns: local_cache_query = 'parts.local_cache_path' else: local_cache_query = "'N/A'" - # --- KONSTRUKSI QUERY --- - # Masukkan logika query dinamis ke dalam f-string query = f''' SELECT datetime(parts.timestamp/1000,'unixepoch') AS "Timestamp (UTC)", diff --git a/scripts/artifacts/keepNotes.py b/scripts/artifacts/keepNotes.py index 73d8a19c..cf48685e 100644 --- a/scripts/artifacts/keepNotes.py +++ b/scripts/artifacts/keepNotes.py @@ -29,14 +29,10 @@ def get_keepNotes(files_found, report_folder, seeker, wrap_text): db = open_sqlite_db_readonly(file_found) cursor = db.cursor() - # --- Cek Keberadaan Tabel --- - # Kita cek apakah tabel FTS (text_search...) ada. Jika tidak, kita gunakan tabel list_item. 
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='text_search_note_content_content'") fts_table = cursor.fetchone() - # --- Konstruksi Query --- if fts_table: - # Query Asli (Jika tabel FTS ada) query = ''' SELECT datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created", @@ -49,8 +45,6 @@ def get_keepNotes(files_found, report_folder, seeker, wrap_text): INNER JOIN tree_entity ON text_search_note_content_content.docid = tree_entity._id ''' else: - # Query Alternatif (Menggunakan tabel list_item) - # Menggunakan GROUP_CONCAT untuk menggabungkan teks jika berupa checklist atau multiline query = ''' SELECT datetime(tree_entity.time_created/1000, 'unixepoch') AS "Time Created", diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py index 1a232000..76959350 100644 --- a/scripts/artifacts/notificationHistory.py +++ b/scripts/artifacts/notificationHistory.py @@ -65,7 +65,6 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): else: logfunc('No Android Notification History - Status data available') - # PERBAIKAN 1: Gunakan elif agar logika tidak bocor ke else saat file adalah settings_secure.xml elif file_name.endswith('notification_policy.xml'): data_list = [] if (checkabx(file_found)): @@ -102,7 +101,6 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): logfunc('No Android Notification History - Snoozed notifications data available') else: - # PERBAIKAN 2: Tambahkan pengecekan keamanan. Jangan parse file XML sebagai Protobuf. if file_name.endswith('.xml'): continue @@ -112,11 +110,10 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): with open(file_found, 'rb') as f: try: content = f.read() - if not content: # Skip jika file kosong + if not content: # Skip if file is empty continue notification_history.ParseFromString(content) except Exception as e: - # Log error tapi jangan hentikan proses keseluruhan, skip file ini saja logfunc(f'Error in the ParseFromString() function for {file_name}. 
The error message was: {e}') continue @@ -168,7 +165,6 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): image_data_offset = values['image_data_offset'] image_uri = values['image_uri'] - # Handle potensi error konversi nama file ke int (jika nama file bukan angka) try: file_creation = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(file_name)/1000.0),'UTC') except ValueError: From b401a3fcc295b86fbf93571aa6b860019e514ddd Mon Sep 17 00:00:00 2001 From: Ariandra Dwiratama Date: Wed, 10 Dec 2025 14:43:39 +0700 Subject: [PATCH 3/5] Refactor string formatting and exception handling in Google Duo and Notification History scripts for improved clarity and robustness --- scripts/artifacts/googleDuo.py | 18 ++++++-------- scripts/artifacts/notificationHistory.py | 31 ++++++++++++------------ 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/scripts/artifacts/googleDuo.py b/scripts/artifacts/googleDuo.py index 20250f96..894eda26 100755 --- a/scripts/artifacts/googleDuo.py +++ b/scripts/artifacts/googleDuo.py @@ -1,11 +1,9 @@ import os import shutil import sqlite3 -import textwrap -import scripts.artifacts.artGlobals from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly +from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly def get_googleDuo(files_found, report_folder, seeker, wrap_text): @@ -57,10 +55,10 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Google Duo - Call History' + tsvname = 'Google Duo - Call History' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Google Duo - Call History' + tlactivity = 'Google Duo - Call History' timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Google Duo - Call History data available') @@ -94,10 +92,10 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Google Duo - Contacts' + tsvname = 'Google Duo - Contacts' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Google Duo - Contacts' + tlactivity = 'Google Duo - Contacts' timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Google Duo - Contacts data available') @@ -109,7 +107,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): cursor.execute("PRAGMA table_info(messages)") columns = [column[1] for column in cursor.fetchall()] has_saved_status = 'saved_status' in columns - except Exception: + except (sqlite3.OperationalError, sqlite3.DatabaseError): has_saved_status = False # Define the 'File Saved' column selection based on availability @@ -182,10 +180,10 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Content']) report.end_artifact_report() - tsvname = f'Google Duo - Notes' + tsvname = 'Google Duo - Notes' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Google Duo - Notes' + tlactivity = 'Google Duo - Notes' timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Google Duo - Notes data available') diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py index 
76959350..ecc59764 100644 --- a/scripts/artifacts/notificationHistory.py +++ b/scripts/artifacts/notificationHistory.py @@ -17,7 +17,6 @@ } import xml.etree.ElementTree as ET -from datetime import * import os import scripts.artifacts.notification_history_pb.notificationhistory_pb2 as notificationhistory_pb2 @@ -27,9 +26,11 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): data_pb_list = [] + file_directory = None for file_found in files_found: file_found = str(file_found) file_name = os.path.basename(file_found) + file_directory = os.path.dirname(file_found) #parsing settings_secure.xml if file_name.endswith('settings_secure.xml'): @@ -51,7 +52,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): pass # setting not available if data_list: - description = f'Indicates whether "Notification History" feature is enabled.' + description = 'Indicates whether "Notification History" feature is enabled.' report = ArtifactHtmlReport('Android Notification History - Status') report.start_artifact_report(report_folder, 'Status',description) report.add_script() @@ -59,7 +60,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Android Notification History - Status' + tsvname = 'Android Notification History - Status' tsv(report_folder, data_headers, data_list, tsvname) else: @@ -86,7 +87,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): else: pass #no snoozed notifications found if data_list: - description = f'Notifications the user chose to snooze for a specific time interval' + description = 'Notifications the user chose to snooze for a specific time interval' report = ArtifactHtmlReport('Android Notification History - Snoozed notifications') report.start_artifact_report(report_folder, 'Snoozed notifications', description) #'Android Notification History - Snoozed notifications') report.add_script() @@ -94,7 +95,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Android Notification History - Snoozed notifications' + tsvname = 'Android Notification History - Snoozed notifications' tsv(report_folder, data_headers, data_list, tsvname) else: @@ -113,7 +114,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): if not content: # Skip if file is empty continue notification_history.ParseFromString(content) - except Exception as e: + except (ValueError, TypeError) as e: logfunc(f'Error in the ParseFromString() function for {file_name}. The error message was: {e}') continue @@ -171,23 +172,23 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): file_creation = '' data_pb_list.append((f'{posted_time}',title,text,package_name,user_id,uid,package_index,channel_name,channel_name_index,channel_id,channel_id_index,conversation_id,conversation_id_index,major_version,image_type,image_bitmap_filename,image_resource_id,image_resource_id_package,image_data_length,image_data_offset,image_uri,file_name,f'{file_creation}')) - except Exception as e: - logfunc(f'Error while opening notification pb files. 
The error message was:" {e}"') - + except (IOError, OSError) as e: + logfunc(f'Error reading file {file_found}: {e}') + if len(data_pb_list) > 0: - description = f'A history of the notifications that landed on the device during the last 24h' + description = 'A history of the notifications that landed on the device during the last 24h' report = ArtifactHtmlReport('Android Notification History - Notifications') - report.start_artifact_report(report_folder, f'Notifications', description) + report.start_artifact_report(report_folder, 'Notifications', description) report.add_script() data_headers = ('Posted Time','Title', 'Text','Package Name','User ID','UID','Package Index','Channel Name','Channel Name Index','Channel ID','Channel ID Index','Conversation ID','Conversation ID Index','Major Version','Image Type','Image Bitmap Filename','Image Resource ID','Image Resource ID Package','Image Data Length','Image Data Offset','Image URI','Protobuf File Name','Protobuf File Creation Date')#,'','','','','','','','','','','','','','') - file_directory = os.path.dirname(file_found) + report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False) report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False) report.end_artifact_report() - tsvname = f'Android Notification History - Notifications' + tsvname = 'Android Notification History - Notifications' tsv(report_folder, data_headers, data_pb_list, tsvname) - tlactivity = f'Android Notification History - Notifications' + tlactivity = 'Android Notification History - Notifications' timeline(report_folder, tlactivity, data_pb_list, data_headers) else: - logfunc(f'No Android Notification History - Notifications available') \ No newline at end of file + logfunc('No Android Notification History - Notifications available') \ No newline at end of file From e98f944a14bf766d850438377a80d95d80fd48a8 Mon Sep 17 00:00:00 2001 From: unknown Date: Wed, 10 Dec 2025 14:58:02 +0700 Subject: [PATCH 4/5] fix: notificationHistory and googleDuo minorbug --- scripts/artifacts/googleDuo.py | 7 ++- scripts/artifacts/notificationHistory.py | 77 +++++++++++++++--------- 2 files changed, 51 insertions(+), 33 deletions(-) diff --git a/scripts/artifacts/googleDuo.py b/scripts/artifacts/googleDuo.py index 894eda26..a34d653c 100755 --- a/scripts/artifacts/googleDuo.py +++ b/scripts/artifacts/googleDuo.py @@ -5,7 +5,7 @@ from scripts.artifact_report import ArtifactHtmlReport from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly -def get_googleDuo(files_found, report_folder, seeker, wrap_text): +def get_googleDuo(files_found, report_folder, _seeker, _wrap_text): for file_found in files_found: file_found = str(file_found) @@ -158,7 +158,7 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): viewed_ts = row[2] sender_id = row[3] recipient_id = row[4] - content_uri = row[5] + # row[5] is content_uri, unused in report logic, removed to fix pylint warning content_name = row[6] content_size = row[7] file_saved = row[8] @@ -170,7 +170,8 @@ def get_googleDuo(files_found, report_folder, seeker, wrap_text): data_file_name = os.path.basename(match) thumb = f'' - data_list.append((row[0],row[1],row[2],row[3],row[4],thumb,row[7],row[8])) + # FIX: Using the variables defined above instead of row[] indices + data_list.append((sent_ts, received_ts, viewed_ts, sender_id, recipient_id, thumb, content_size, file_saved)) report = ArtifactHtmlReport('Google Duo - Notes') 
             report.start_artifact_report(report_folder, 'Google Duo - Notes')
diff --git a/scripts/artifacts/notificationHistory.py b/scripts/artifacts/notificationHistory.py
index ecc59764..350ecb7e 100644
--- a/scripts/artifacts/notificationHistory.py
+++ b/scripts/artifacts/notificationHistory.py
@@ -1,7 +1,7 @@
 __artifacts_v2__ = {
     "Android Notification History": {
         "name": "Android Notification History",
-        "description": "Get Android notifications' history, policy and settings. This parser is based on a research project",
+        "description": "Get Android notifications' history, policy and settings.",
         "author": "Evangelos Dragonas (@theAtropos4n6)",
         "version": "0.0.1",
         "date": "2024-07-02",
@@ -18,25 +18,26 @@
 import xml.etree.ElementTree as ET
 import os
 
-import scripts.artifacts.notification_history_pb.notificationhistory_pb2 as notificationhistory_pb2
-
+from scripts.artifacts.notification_history_pb import notificationhistory_pb2
 from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, abxread, checkabx,convert_ts_int_to_utc,convert_utc_human_to_timezone
-
+from scripts.ilapfuncs import logfunc, tsv, timeline, abxread, checkabx, convert_ts_int_to_utc, convert_utc_human_to_timezone
 
-def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
+def get_notificationHistory(files_found, report_folder, _seeker, _wrap_text):
     data_pb_list = []
     file_directory = None
+
     for file_found in files_found:
         file_found = str(file_found)
         file_name = os.path.basename(file_found)
-        file_directory = os.path.dirname(file_found)
 
-        #parsing settings_secure.xml
+        # Keep the directory of the last processed file for use in the report
+        current_file_directory = os.path.dirname(file_found)
+
+        # parsing settings_secure.xml
         if file_name.endswith('settings_secure.xml'):
             data_list = []
             user = os.path.basename(os.path.dirname(file_found))
-            if (checkabx(file_found)):
+            if checkabx(file_found):
                 multi_root = True
                 tree = abxread(file_found, multi_root)
             else:
@@ -48,13 +49,11 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                     value = setting.attrib.get('value')
                     value = "Enabled" if value == "1" else "Disabled" if value == "0" else "Unknown"
                     data_list.append((value, user))
-                else:
-                    pass # setting not available
 
             if data_list:
                 description = 'Indicates whether "Notification History" feature is enabled.'
report = ArtifactHtmlReport('Android Notification History - Status') - report.start_artifact_report(report_folder, 'Status',description) + report.start_artifact_report(report_folder, 'Status', description) report.add_script() data_headers = ('Status', 'User') report.write_artifact_data_table(data_headers, data_list, file_found) @@ -68,7 +67,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): elif file_name.endswith('notification_policy.xml'): data_list = [] - if (checkabx(file_found)): + if checkabx(file_found): multi_root = False tree = abxread(file_found, multi_root) else: @@ -81,15 +80,14 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): for notification in elem: if notification.tag == 'notification': notification_ts = int(notification.attrib.get('time')) - snooze_time = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(notification_ts/1000.0)),'UTC') + snooze_time = convert_utc_human_to_timezone(convert_ts_int_to_utc(int(notification_ts/1000.0)), 'UTC') notification_key = notification.attrib.get('key') data_list.append((f'{snooze_time}', notification_key)) - else: - pass #no snoozed notifications found + if data_list: description = 'Notifications the user chose to snooze for a specific time interval' report = ArtifactHtmlReport('Android Notification History - Snoozed notifications') - report.start_artifact_report(report_folder, 'Snoozed notifications', description) #'Android Notification History - Snoozed notifications') + report.start_artifact_report(report_folder, 'Snoozed notifications', description) report.add_script() data_headers = ('Reminder Time', 'Snoozed Notification') report.write_artifact_data_table(data_headers, data_list, file_found) @@ -97,7 +95,6 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): tsvname = 'Android Notification History - Snoozed notifications' tsv(report_folder, data_headers, data_list, tsvname) - else: logfunc('No Android Notification History - Snoozed notifications data available') @@ -105,7 +102,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): if file_name.endswith('.xml'): continue - #iterate through the notification pbs + # iterate through the notification pbs try: notification_history = notificationhistory_pb2.NotificationHistoryProto() with open(file_found, 'rb') as f: @@ -118,22 +115,32 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text): logfunc(f'Error in the ParseFromString() function for {file_name}. 
The error message was: {e}')
                     continue
 
-            package_map = {i + 1: pkg for i, pkg in enumerate(notification_history.string_pool.strings)} # one of the protobuf files stores the package name and indexes
+            # FIX: Use getattr to avoid a no-member error on string_pool
+            string_pool = getattr(notification_history, 'string_pool', None)
+            if string_pool:
+                package_map = {i + 1: pkg for i, pkg in enumerate(string_pool.strings)}
+            else:
+                package_map = {}
+
+            # FIX: Use getattr for major_version
+            major_version = getattr(notification_history, 'major_version', None) if notification_history.HasField('major_version') else None
+
+            # FIX: Use getattr for the notification list
+            notifications = getattr(notification_history, 'notification', [])
 
-            major_version = notification_history.major_version if notification_history.HasField('major_version') else None # notification format version should be 1
-            for notification in notification_history.notification:
-                package_name = notification.package if notification.package else package_map.get(notification.package_index, "") #retrieves package from the map if not stored locally
+            for notification in notifications:
+                package_name = notification.package if notification.package else package_map.get(notification.package_index, "")
 
-                #this block tries to fetch the value of each field from within the parsed protobuf file e.g. variable user_id -> recovers the user_id field from the pb
                 fields = ['uid', 'user_id', 'package_index', 'channel_name', 'channel_id','channel_id_index', 'channel_name_index', 'conversation_id', 'conversation_id_index']
-                defaults = {field: 'Error' for field in fields}
+                # REMOVED: defaults = {field: 'Error' for field in fields} (unused variable)
+
                 values = {}
                 for field in fields:
                     try:
                         values[field] = getattr(notification, field)
                     except AttributeError:
                         values[field] = 'Error'
-                #extra block that does the same for the notifications with icons
+
                 if notification.HasField('icon'):
                     icon_fields = ['image_type', 'image_bitmap_filename', 'image_resource_id', 'image_resource_id_package','image_data_length', 'image_data_offset', 'image_uri']
                     for icon_field in icon_fields:
@@ -145,7 +152,7 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                     ]
                     for icon_field in icon_fields:
                         values[icon_field] = None
-                #here the returned values are assigned to the variables which are reported
+
                 uid = values['uid']
                 user_id = values['user_id']
                 package_index = values['package_index']
@@ -172,6 +179,10 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
                        file_creation = ''
 
                 data_pb_list.append((f'{posted_time}',title,text,package_name,user_id,uid,package_index,channel_name,channel_name_index,channel_id,channel_id_index,conversation_id,conversation_id_index,major_version,image_type,image_bitmap_filename,image_resource_id,image_resource_id_package,image_data_length,image_data_offset,image_uri,file_name,f'{file_creation}'))
+
+                # Set file_directory once data has been obtained from at least one file
+                file_directory = current_file_directory
+
         except (IOError, OSError) as e:
             logfunc(f'Error reading file {file_found}: {e}')
 
@@ -180,9 +191,15 @@ def get_notificationHistory(files_found, report_folder, seeker, wrap_text):
         report = ArtifactHtmlReport('Android Notification History - Notifications')
         report.start_artifact_report(report_folder, 'Notifications', description)
         report.add_script()
-        data_headers = ('Posted Time','Title', 'Text','Package Name','User ID','UID','Package Index','Channel Name','Channel Name Index','Channel ID','Channel ID Index','Conversation ID','Conversation ID Index','Major Version','Image Type','Image Bitmap Filename','Image Resource ID','Image Resource ID Package','Image Data Length','Image Data Offset','Image URI','Protobuf File Name','Protobuf File Creation Date')#,'','','','','','','','','','','','','','')
-        report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False)
-        report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False)
+        data_headers = ('Posted Time','Title', 'Text','Package Name','User ID','UID','Package Index','Channel Name','Channel Name Index','Channel ID','Channel ID Index','Conversation ID','Conversation ID Index','Major Version','Image Type','Image Bitmap Filename','Image Resource ID','Image Resource ID Package','Image Data Length','Image Data Offset','Image URI','Protobuf File Name','Protobuf File Creation Date')
+
+        # Make sure file_directory has a value before it is used
+        if file_directory:
+            report.write_artifact_data_table(data_headers, data_pb_list, file_directory, html_escape=False)
+        else:
+            # Fallback in case file_directory is somehow still None even though data exists (edge case)
+            report.write_artifact_data_table(data_headers, data_pb_list, "", html_escape=False)
+
         report.end_artifact_report()
 
         tsvname = 'Android Notification History - Notifications'
From a3bdc0c711ed0c55f0c7b9f8c1c3621f07b653a8 Mon Sep 17 00:00:00 2001
From: Ariandra Dwiratama
Date: Wed, 10 Dec 2025 15:02:03 +0700
Subject: [PATCH 5/5] refactor: improve exception handling and parameter naming in battery and google messages scripts

---
 .../artifacts/deviceHealthServices_Battery.py | 17 ++++++-----------
 scripts/artifacts/googleMessages.py           | 14 ++++++--------
 2 files changed, 12 insertions(+), 19 deletions(-)

diff --git a/scripts/artifacts/deviceHealthServices_Battery.py b/scripts/artifacts/deviceHealthServices_Battery.py
index 2d578c0c..0fffd920 100644
--- a/scripts/artifacts/deviceHealthServices_Battery.py
+++ b/scripts/artifacts/deviceHealthServices_Battery.py
@@ -28,17 +28,12 @@
         "artifact_icon": "bluetooth"
     }
 }
-
-import sqlite3
-import textwrap
 import os
-from packaging import version
-from scripts.artifact_report import ArtifactHtmlReport
 from scripts.ilapfuncs import artifact_processor, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
 
 
 @artifact_processor
-def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
+def Turbo_Battery(files_found, _report_folder, seeker, _wrap_text):
     source_file_turbo = ''
     turbo_db = ''
     data_list = []
@@ -50,7 +45,7 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
             time_offset = getattr(seeker, 'time_zone', 0)
         if time_offset is None:
             time_offset = 0
-    except Exception:
+    except AttributeError:
         time_offset = 0
 
     for file_found in files_found:
@@ -92,7 +87,7 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
                 else:
                     try:
                         timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset)
-                    except Exception:
+                    except (ValueError, TypeError):
                         # fallback to UTC human timestamp if timezone conversion fails
                         timestamp = convert_ts_human_to_utc(timestamp)
                 data_list.append((timestamp,row[1],row[2],row[3],row[4],file_found))
@@ -104,7 +99,7 @@ def Turbo_Battery(files_found, report_folder, seeker, wrap_text):
     return data_headers, data_list, source_file_turbo
 
 @artifact_processor
-def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text):
+def Turbo_Bluetooth(files_found, 
_report_folder, seeker, _wrap_text): source_file_bluetooth = '' bluetooth_db = '' data_list = [] @@ -116,7 +111,7 @@ def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text): time_offset = getattr(seeker, 'time_zone', 0) if time_offset is None: time_offset = 0 - except Exception: + except AttributeError: time_offset = 0 for file_found in files_found: @@ -149,7 +144,7 @@ def Turbo_Bluetooth(files_found, report_folder, seeker, wrap_text): else: try: timestamp = convert_utc_human_to_timezone(convert_ts_human_to_utc(timestamp), time_offset) - except Exception: + except (ValueError, TypeError): timestamp = convert_ts_human_to_utc(timestamp) data_list.append((timestamp,row[1],row[2],row[3],row[4],row[5],file_found)) db.close() diff --git a/scripts/artifacts/googleMessages.py b/scripts/artifacts/googleMessages.py index 72a5d495..437e9e94 100755 --- a/scripts/artifacts/googleMessages.py +++ b/scripts/artifacts/googleMessages.py @@ -4,14 +4,12 @@ # Version: 0.1 # Requirements: None -import os import sqlite3 -import textwrap from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly +from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly -def get_googleMessages(files_found, report_folder, seeker, wrap_text): +def get_googleMessages(files_found, report_folder, _seeker, _wrap_text): for file_found in files_found: file_found = str(file_found) @@ -24,7 +22,7 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): try: cursor.execute("PRAGMA table_info(parts)") columns = [column[1] for column in cursor.fetchall()] - except Exception: + except sqlite3.OperationalError: columns = [] if 'file_size_bytes' in columns: @@ -62,7 +60,7 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): cursor.execute(query) all_rows = cursor.fetchall() usageentries = len(all_rows) - except Exception as e: + except sqlite3.Error as e: logfunc(f'Error executing query in Google Messages: {e}') usageentries = 0 @@ -78,10 +76,10 @@ def get_googleMessages(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Google Messages' + tsvname = 'Google Messages' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Google Messages' + tlactivity = 'Google Messages' timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Google Messages data available')
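
The recurring change across this series is a schema guard: query PRAGMA table_info() before referencing optional columns, and fall back to a literal when a column is absent. A minimal standalone sketch of that pattern for reviewers (the helper names column_exists and build_size_expr are illustrative only and are not part of the patches):

import sqlite3

def column_exists(cursor, table, column):
    # PRAGMA table_info returns one row per column; the column name is field 1.
    try:
        cursor.execute(f"PRAGMA table_info('{table}')")
        return column in [row[1] for row in cursor.fetchall()]
    except sqlite3.Error:
        return False

def build_size_expr(cursor):
    # Mirrors the googleMessages.py change: emit the CASE expression only when
    # the optional column exists, otherwise select a literal placeholder.
    if column_exists(cursor, 'parts', 'file_size_bytes'):
        return "CASE WHEN parts.file_size_bytes = -1 THEN 'N/A' ELSE parts.file_size_bytes END"
    return "'N/A'"

Either branch of build_size_expr can then be interpolated into the artifact query, exactly as the patches do with file_size_query and local_cache_query.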