95 changes: 50 additions & 45 deletions scripts/artifacts/chromeAutofill.py
@@ -94,53 +94,58 @@ def get_chromeAutofill(files_found, report_folder, seeker, wrap_text):
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc(f'No {browser_name} - Autofill - Entries data available')

cursor.execute(f'''
select
datetime(date_modified, 'unixepoch'),
autofill_profiles.guid,
autofill_profile_names.first_name,
autofill_profile_names.middle_name,
autofill_profile_names.last_name,
autofill_profile_emails.email,
autofill_profile_phones.number,
autofill_profiles.company_name,
autofill_profiles.street_address,
autofill_profiles.city,
autofill_profiles.state,
autofill_profiles.zipcode,
datetime(use_date, 'unixepoch'),
autofill_profiles.use_count
from autofill_profiles
inner join autofill_profile_emails ON autofill_profile_emails.guid = autofill_profiles.guid
inner join autofill_profile_phones ON autofill_profiles.guid = autofill_profile_phones.guid
inner join autofill_profile_names ON autofill_profile_phones.guid = autofill_profile_names.guid
''')

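# some Web Data databases do not contain the autofill_profiles table, so check for it before querying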
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='autofill_profiles'")
if cursor.fetchone():
try:
cursor.execute(f'''
select
datetime(date_modified, 'unixepoch'),
autofill_profiles.guid,
autofill_profile_names.first_name,
autofill_profile_names.middle_name,
autofill_profile_names.last_name,
autofill_profile_emails.email,
autofill_profile_phones.number,
autofill_profiles.company_name,
autofill_profiles.street_address,
autofill_profiles.city,
autofill_profiles.state,
autofill_profiles.zipcode,
datetime(use_date, 'unixepoch'),
autofill_profiles.use_count
from autofill_profiles
inner join autofill_profile_emails ON autofill_profile_emails.guid = autofill_profiles.guid
inner join autofill_profile_phones ON autofill_profiles.guid = autofill_profile_phones.guid
inner join autofill_profile_names ON autofill_profile_phones.guid = autofill_profile_names.guid
''')

all_rows = cursor.fetchall()
usageentries = len(all_rows)
if usageentries > 0:
report = ArtifactHtmlReport(f'{browser_name} - Autofill - Profiles')
# check for an existing report file and get the next unused name, so a report from another file does not get overwritten
report_path = os.path.join(report_folder, f'{browser_name} - Autofill - Profiles.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('Date Modified','GUID','First Name','Middle Name','Last Name','Email','Phone Number','Company Name','Address','City','State','Zip Code','Date Last Used','Use Count')
data_list = []
for row in all_rows:
data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13]))
all_rows = cursor.fetchall()
usageentries = len(all_rows)
if usageentries > 0:
report = ArtifactHtmlReport(f'{browser_name} - Autofill - Profiles')
# check for an existing report file and get the next unused name, so a report from another file does not get overwritten
report_path = os.path.join(report_folder, f'{browser_name} - Autofill - Profiles.temphtml')
report_path = get_next_unused_name(report_path)[:-9] # remove .temphtml
report.start_artifact_report(report_folder, os.path.basename(report_path))
report.add_script()
data_headers = ('Date Modified','GUID','First Name','Middle Name','Last Name','Email','Phone Number','Company Name','Address','City','State','Zip Code','Date Last Used','Use Count')
data_list = []
for row in all_rows:
data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13]))

report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()

tsvname = f'{browser_name} - Autofill - Profiles'
tsv(report_folder, data_headers, data_list, tsvname)

tlactivity = f'{browser_name} - Autofill - Profiles'
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc(f'No {browser_name} - Autofill - Profiles data available')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()

tsvname = f'{browser_name} - Autofill - Profiles'
tsv(report_folder, data_headers, data_list, tsvname)

tlactivity = f'{browser_name} - Autofill - Profiles'
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc(f'No {browser_name} - Autofill - Profiles data available')
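# any of the joined tables or columns may be absent in this schema version, raising OperationalError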
except sqlite3.OperationalError as e:
logfunc(f'Error reading {browser_name} - Autofill - Profiles: {e}')

db.close()

36 changes: 25 additions & 11 deletions scripts/artifacts/speedtest.py
@@ -41,8 +41,20 @@

from datetime import datetime, timezone, timedelta
from scripts.ilapfuncs import open_sqlite_db_readonly, logfunc, artifact_processor, convert_unix_ts_to_utc
import json

import json
import re

# Fix ISO 8601 timestamps that are missing the colon in the timezone offset
def fix_iso_timestamp(ts):
if not isinstance(ts, str):
return ts

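# match a trailing UTC offset written without a colon, e.g. '+0530' or '-0800'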
match = re.search(r'([+-]\d{2})(\d{2})$', ts)
if match:
fixed = ts[:-5] + match.group(1) + ":" + match.group(2)
return fixed

return ts
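
# e.g. fix_iso_timestamp('2024-01-01T12:00:00+0000') returns '2024-01-01T12:00:00+00:00',
# which datetime.fromisoformat() can parse on Python versions before 3.11
# (offsets without a colon are only accepted from 3.11 onwards)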

@artifact_processor
def speedtest_tests(files_found, report_folder, seeker, wrap_text):
file_path = files_found[0]
@@ -55,15 +67,15 @@ def speedtest_tests(files_found, report_folder, seeker, wrap_text):
cur.execute('SELECT date, connectionType, ssid, userLatitude, userLongitude, externalIp, internalIp, downloadKbps, uploadKbps FROM UnivSpeedTestResult')
result = cur.fetchall()
except Exception as ex:
logfunc('Error retrieving Speedtest test results: ', ex)
logfunc(f'Error retrieving Speedtest test results: {ex}')

timestamped_result = []
for row in result:
row = list(row)
try:
row[0] = convert_unix_ts_to_utc(row[0])
except Exception:
logfunc('Error converting timestamp for Speedtest test result: ', ex)
except Exception as ex:
logfunc(f'Error converting timestamp for Speedtest test result: {ex}')
timestamped_result.append(row)

return headers, timestamped_result, file_path
@@ -89,7 +101,8 @@ def speedtest_reports_location(files_found, report_folder, seeker, wrap_text):
try:
j = json.loads(row[0])
location_data = j.get('start', {}).get('location', {})
report_timestamp = datetime.fromisoformat(j.get('start', {}).get('timestamp', '1970-01-01T00:00:00Z')).astimezone(timezone.utc)
ts = fix_iso_timestamp(j.get('start', {}).get('timestamp', '1970-01-01T00:00:00Z'))
report_timestamp = datetime.fromisoformat(ts).astimezone(timezone.utc)
if location_data:
latitude = location_data.get('latitude', None)
longitude = location_data.get('longitude', None)
@@ -98,7 +111,7 @@

reports.append((report_timestamp, latitude, longitude, altitude, accuracy))
except Exception as ex:
logfunc('Error retrieving Speedtest reports: ', ex)
logfunc(f'Error retrieving Speedtest reports: {ex}')

return headers, reports, file_path

@@ -124,8 +137,9 @@ def speedtest_reports_wifi(files_found, report_folder, seeker, wrap_text):
wifi_scan_data = j.get('start', {}).get('extended', {}).get('wifi', {}).get('scanResults', [])

elapsedRealtimeNanos = j.get('start', {}).get('time', {}).get('elapsedRealtimeNanos', 0)
timestamp = j.get('start', {}).get('time', {}).get('timestamp', 0)
boot_time = datetime.fromisoformat(timestamp).astimezone(timezone.utc) - timedelta(microseconds=elapsedRealtimeNanos/1000) if timestamp and elapsedRealtimeNanos else None
timestamp_raw = j.get('start', {}).get('time', {}).get('timestamp', 0)
ts = fix_iso_timestamp(timestamp_raw)
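# device boot time = the test's wall-clock start timestamp minus the time elapsed since boot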
boot_time = datetime.fromisoformat(ts).astimezone(timezone.utc) - timedelta(microseconds=elapsedRealtimeNanos / 1000) if ts and elapsedRealtimeNanos else None

for scan_result in wifi_scan_data:
try:
@@ -134,11 +148,11 @@ def speedtest_reports_wifi(files_found, report_folder, seeker, wrap_text):
ssid = scan_result.get('SSID')
level = scan_result.get('level')
except Exception as ex:
logfunc('Error retrieving Speedtest Wi-Fi scan data: ', ex)
logfunc(f'Error retrieving Speedtest Wi-Fi scan data: {ex}')

results.append((timestamp, bssid, ssid, level))

except Exception as ex:
logfunc('Error retrieving Speedtest reports: ', ex)
logfunc(f'Error retrieving Speedtest reports: {ex}')

return headers, results, file_path