From b9721585fbc849abcec45604898b7e49f800ed9f Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Fri, 9 Jan 2026 21:23:14 +0530
Subject: [PATCH 1/9] Add parser for Fitbit passive_stats.db (Wear OS)
- Introduces parsing for `passive_stats.db`, primarily found in Pixel Watch and other Wear OS extractions.
- Parses `ExerciseSummaryEntity` for workout context (Running, Walking, etc.).
- Extracts raw GPS trackpoints from `ExerciseGpsEntity`.
- Extracts high-frequency heart rate data from `HeartRateStatEntity` and `OpaqueHeartRateEntity` (see the query sketch after this list).
- Note: This DB often acts as a raw sensor buffer on Wear OS devices.
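
For reference, a minimal standalone sketch of the heart-rate query this module runs, assuming a local copy of `passive_stats.db` (hypothetical path) and the `HeartRateStatEntity` columns used below, with timestamps stored as Unix epoch milliseconds:

    import sqlite3

    # Open a local copy of passive_stats.db read-only (hypothetical filename)
    db = sqlite3.connect('file:passive_stats.db?mode=ro', uri=True)
    cursor = db.cursor()
    cursor.execute('''
        SELECT
            datetime(startTime/1000, 'unixepoch') AS start_time,  -- epoch ms -> UTC
            datetime(endTime/1000, 'unixepoch')   AS end_time,
            value AS bpm,
            accuracy
        FROM HeartRateStatEntity
        ORDER BY startTime DESC
    ''')
    for start_time, end_time, bpm, accuracy in cursor.fetchall():
        print(start_time, end_time, bpm, accuracy)
    db.close()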
---
scripts/artifacts/fitbit_passive_stats.py | 381 ++++++++++++++++++++++
1 file changed, 381 insertions(+)
create mode 100644 scripts/artifacts/fitbit_passive_stats.py
diff --git a/scripts/artifacts/fitbit_passive_stats.py b/scripts/artifacts/fitbit_passive_stats.py
new file mode 100644
index 00000000..719a3744
--- /dev/null
+++ b/scripts/artifacts/fitbit_passive_stats.py
@@ -0,0 +1,381 @@
+# Module Description: Parses Fitbit Passive Stats DB from Wear OS
+# Author: ganeshbs17
+# Date: 2025-01-09
+# Artifact version: 1.0.1
+# Requirements: none
+
+import sqlite3
+
+from scripts.artifact_report import ArtifactHtmlReport
+from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
+
+def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
+
+ # Locate the specific database file
+ source_db = ''
+ for file_found in files_found:
+ if file_found.endswith('passive_stats.db'):
+ source_db = file_found
+ break
+
+ if source_db:
+ db = open_sqlite_db_readonly(source_db)
+ cursor = db.cursor()
+
+ # -----------------------------------------------------------------------
+ # 1. Exercise Summaries (Workouts - The "Parent" Record)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Start Time",
+ sessionId,
+ case exerciseTypeId
+ when 0 then 'Unknown'
+ when 39 then 'Running'
+ when 8 then 'Cycling'
+ when 79 then 'Walking'
+ when 40 then 'Swimming'
+ when 7 then 'Hiking'
+ else exerciseTypeId
+ end as "Activity Type",
+ totalDistanceMm / 1000000.0 as "Distance (KM)",
+ steps,
+ caloriesBurned,
+ avgHeartRate,
+ elevationGainFt
+ FROM ExerciseSummaryEntity
+ ORDER BY time DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Workouts')
+ report.start_artifact_report(report_folder, 'Fitbit - Workouts')
+ report.add_script()
+
+ data_headers = ('Start Time', 'Session ID', 'Activity Type', 'Distance (KM)', 'Steps', 'Calories', 'Avg HR', 'Elevation (ft)')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Workouts')
+ timeline(report_folder, 'Fitbit - Workouts', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Workout Summaries found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Workouts: {e}')
+
+ # -----------------------------------------------------------------------
+ # 2. Exercise GPS Data (Track Points)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Timestamp",
+ latitude,
+ longitude,
+ altitude,
+ speed,
+ bearing,
+ estimatedPositionError
+ FROM ExerciseGpsEntity
+ ORDER BY time ASC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - GPS Trackpoints')
+ report.start_artifact_report(report_folder, 'Fitbit - GPS Trackpoints')
+ report.add_script()
+
+ data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Altitude', 'Speed', 'Bearing', 'Est. Error')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - GPS Trackpoints')
+ timeline(report_folder, 'Fitbit - GPS Trackpoints', data_list, data_headers)
+ else:
+ logfunc('No Fitbit GPS data found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit GPS: {e}')
+
+ # -----------------------------------------------------------------------
+ # 3. Heart Rate Stats (Standard)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ datetime(endTime/1000, 'unixepoch') as "End Time",
+ value as "BPM",
+ accuracy
+ FROM HeartRateStatEntity
+ ORDER BY startTime DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Heart Rate Stats')
+ report.start_artifact_report(report_folder, 'Fitbit - Heart Rate Stats')
+ report.add_script()
+
+ data_headers = ('Start Time', 'End Time', 'BPM', 'Accuracy')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Heart Rate Stats')
+ timeline(report_folder, 'Fitbit - Heart Rate Stats', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Heart Rate Stats found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit HR Stats: {e}')
+
+ # -----------------------------------------------------------------------
+ # 4. Live Pace
+ # -----------------------------------------------------------------------
+ try:
+ # Note: timeSeconds column contains milliseconds in newer app versions
+ cursor.execute('''
+ SELECT
+ datetime(timeSeconds/1000, 'unixepoch') as "Timestamp",
+ sessionId,
+ statType,
+ value
+ FROM LivePaceEntity
+ ORDER BY timeSeconds DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Live Pace')
+ report.start_artifact_report(report_folder, 'Fitbit - Live Pace')
+ report.add_script()
+
+ data_headers = ('Timestamp', 'Session ID', 'Stat Type', 'Value')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Live Pace')
+ timeline(report_folder, 'Fitbit - Live Pace', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Live Pace data found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Live Pace: {e}')
+
+ # -----------------------------------------------------------------------
+ # 5. Sleep Periods
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(sleepStartTime/1000, 'unixepoch') as "Sleep Start",
+ datetime(sleepEndTime/1000, 'unixepoch') as "Sleep End",
+ (sleepEndTime - sleepStartTime)/1000/60 as "Duration (Mins)"
+ FROM LocalSleepPeriodsEntity
+ ORDER BY sleepStartTime DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Sleep')
+ report.start_artifact_report(report_folder, 'Fitbit - Sleep')
+ report.add_script()
+
+ data_headers = ('Sleep Start', 'Sleep End', 'Duration (Mins)')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep')
+ timeline(report_folder, 'Fitbit - Sleep', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Sleep data found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Sleep: {e}')
+
+ # -----------------------------------------------------------------------
+ # 6. Active Zone Minutes (AZM)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ datetime(endTime/1000, 'unixepoch') as "End Time",
+ activeZone,
+ value as "Points",
+ lastBpm
+ FROM PassiveAzmEntity
+ ORDER BY startTime DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Active Zones')
+ report.start_artifact_report(report_folder, 'Fitbit - Active Zones')
+ report.add_script()
+
+ data_headers = ('Start Time', 'End Time', 'Zone ID', 'Points', 'Last BPM')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Active Zones')
+ timeline(report_folder, 'Fitbit - Active Zones', data_list, data_headers)
+ else:
+ logfunc('No Fitbit AZM data found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit AZM: {e}')
+
+ # -----------------------------------------------------------------------
+ # 7. Daily Stats (Pattern of Life)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(dayStart/1000, 'unixepoch') as "Date",
+ datetime(lastUpdated/1000, 'unixepoch') as "Last Updated",
+ case statId
+ when 1 then 'Steps'
+ when 2 then 'Distance'
+ when 3 then 'Calories'
+ when 4 then 'Floors'
+ else statId
+ end as "Stat Type",
+ value
+ FROM PassiveDailyStatsEntity
+ ORDER BY dayStart DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Daily Stats')
+ report.start_artifact_report(report_folder, 'Fitbit - Daily Stats')
+ report.add_script()
+
+ data_headers = ('Date', 'Last Updated', 'Stat Type', 'Value')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Daily Stats')
+ timeline(report_folder, 'Fitbit - Daily Stats', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Daily Stats found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Daily Stats: {e}')
+
+ # -----------------------------------------------------------------------
+ # 8. Exercise Splits (Pace/Km)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Split Time",
+ sessionId,
+ avgPaceMilliSecPerKm / 1000 / 60.0 as "Avg Pace (Min/Km)",
+ avgHeartRate,
+ steps,
+ caloriesBurned
+ FROM ExerciseSplitAnnotationEntity
+ ORDER BY time ASC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Workout Splits')
+ report.start_artifact_report(report_folder, 'Fitbit - Workout Splits')
+ report.add_script()
+
+ data_headers = ('Split Time', 'Session ID', 'Avg Pace (Min/Km)', 'Avg HR', 'Steps', 'Calories')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Workout Splits')
+ timeline(report_folder, 'Fitbit - Workout Splits', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Splits found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Splits: {e}')
+
+ # -----------------------------------------------------------------------
+ # 9. Opaque Heart Rate (Raw Sensor Data)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(timestamp/1000, 'unixepoch') as "Timestamp",
+ baseHeartRate as "Base HR",
+ confidence as "Confidence (0-3)"
+ FROM OpaqueHeartRateEntity
+ ORDER BY timestamp DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Opaque HR')
+ report.start_artifact_report(report_folder, 'Fitbit - Opaque HR')
+ report.add_script()
+
+ data_headers = ('Timestamp', 'Base HR', 'Confidence')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Opaque HR')
+ timeline(report_folder, 'Fitbit - Opaque HR', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Opaque HR data found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Opaque HR: {e}')
+
+ db.close()
+ else:
+ logfunc('Fitbit passive_stats.db not found')
+
+__artifacts__ = {
+ "FitbitPassiveStats": (
+ "Fitbit",
+ ('*/databases/passive_stats.db'),
+ get_fitbit_passive_stats)
+}
\ No newline at end of file
From 9f7f95efbe0f935c38254e539e21cb4d84d1a3e9 Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Fri, 9 Jan 2026 21:25:36 +0530
Subject: [PATCH 2/9] Add parser for Fitbit user.db (Wear OS)
- Introduces parsing for `user.db` for profile and activity attribution.
- Parses `FitbitProfileEntity` for user identity (Name, DOB, User ID).
- Parses `ActivityExerciseEntity` for logical workout history.
- Implements JSON parsing for `SedentaryDataEntity` to extract granular hourly step counts.
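
For reference, a minimal sketch of the hourly-step flattening; the sample JSON values are hypothetical, while the key names `hourlyData`, `dateTime`, and `steps` are the ones the parser expects:

    import json

    # Illustrative SedentaryDataEntity.hourlyData blob (values are made up)
    raw = '{"hourlyData": [{"dateTime": "09:00", "steps": 412}, {"dateTime": "10:00", "steps": 87}]}'
    date_str = '2026-01-09'  # taken from the row's date column

    for entry in json.loads(raw).get('hourlyData', []):
        time_str = entry.get('dateTime', '')
        steps = entry.get('steps', 0)
        # A combined timestamp is built so each hour can be placed on the timeline
        print(f'{date_str} {time_str}', steps)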
---
scripts/artifacts/fitbitUserDb.py | 250 ++++++++++++++++++++++++++++++
1 file changed, 250 insertions(+)
create mode 100644 scripts/artifacts/fitbitUserDb.py
diff --git a/scripts/artifacts/fitbitUserDb.py b/scripts/artifacts/fitbitUserDb.py
new file mode 100644
index 00000000..0e38fffd
--- /dev/null
+++ b/scripts/artifacts/fitbitUserDb.py
@@ -0,0 +1,250 @@
+# Module Description: Parses Fitbit User DB from Wear OS devices to extract user profile, activity history, daily summaries, hourly steps, and sleep logs.
+# Author: ganeshbs17
+# Date: 2025-01-09
+# Artifact version: 1.0.4
+# Requirements: none
+
+import sqlite3
+import json
+
+from scripts.artifact_report import ArtifactHtmlReport
+from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
+
+def get_fitbit_user_db(files_found, report_folder, seeker, wrap_text):
+
+ source_db = ''
+ for file_found in files_found:
+ if file_found.endswith('user.db'):
+ source_db = file_found
+ break
+
+ if source_db:
+ db = open_sqlite_db_readonly(source_db)
+ cursor = db.cursor()
+
+ # -----------------------------------------------------------------------
+ # 1. User Profile
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ fullName,
+ displayName,
+ email,
+ gender,
+ dateOfBirth,
+ height,
+ weight,
+ memberSince,
+ userId
+ FROM FitbitProfileEntity
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - User Profile')
+ report.start_artifact_report(report_folder, 'Fitbit - User Profile')
+ report.add_script()
+
+ data_headers = ('Full Name', 'Display Name', 'Email', 'Gender', 'DOB', 'Height', 'Weight', 'Member Since', 'User ID')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - User Profile')
+ else:
+ logfunc('No Fitbit User Profile found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Profile: {e}')
+
+ # -----------------------------------------------------------------------
+ # 2. Activity / Workout History
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ name as "Activity Name",
+ duration/1000/60 as "Duration (Mins)",
+ distance,
+ distanceUnit,
+ steps,
+ calories,
+ averageHeartRate,
+ elevationGain,
+ activeZoneMinutes,
+ logId
+ FROM ActivityExerciseEntity
+ ORDER BY startTime DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Activity History')
+ report.start_artifact_report(report_folder, 'Fitbit - Activity History')
+ report.add_script()
+
+ data_headers = ('Start Time', 'Activity Name', 'Duration (Mins)', 'Distance', 'Unit', 'Steps', 'Calories', 'Avg HR', 'Elevation', 'AZM', 'Log ID')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Activity History')
+ timeline(report_folder, 'Fitbit - Activity History', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Activity History found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Activity History: {e}')
+
+ # -----------------------------------------------------------------------
+ # 3. DAILY Summaries (One row per day)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ date,
+ totalMinutesMoving,
+ totalMinutesSedentary,
+ longestDuration as "Longest Sedentary Duration",
+ longestStart as "Longest Sedentary Start Time"
+ FROM SedentaryDataEntity
+ ORDER BY date DESC
+ ''')
+
+ daily_rows = cursor.fetchall()
+
+ if len(daily_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Daily Activity Summary')
+ report.start_artifact_report(report_folder, 'Fitbit - Daily Activity Summary')
+ report.add_script()
+
+ data_headers = ('Date', 'Total Moving Mins', 'Total Sedentary Mins', 'Longest Sedentary Duration (Mins)', 'Longest Sedentary Start Time')
+ data_list = []
+ for row in daily_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Daily Activity Summary')
+ # Timeline optional for Daily stats, but good for overview
+ timeline(report_folder, 'Fitbit - Daily Activity Summary', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Daily Summaries found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Daily Summaries: {e}')
+
+ # -----------------------------------------------------------------------
+ # 4. HOURLY Steps (Flattened JSON)
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ date,
+ hourlyData
+ FROM SedentaryDataEntity
+ ORDER BY date DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ hourly_data_list = []
+
+ for row in all_rows:
+ date_str = row[0]
+ json_data = row[1]
+
+ if json_data:
+ try:
+ parsed_json = json.loads(json_data)
+ hourly_list = parsed_json.get('hourlyData', [])
+
+ for hour_entry in hourly_list:
+ time_str = hour_entry.get('dateTime', '')
+ steps_str = hour_entry.get('steps', '0')
+
+ # Create a full timestamp for the timeline
+ full_timestamp = f"{date_str} {time_str}"
+
+ hourly_data_list.append((full_timestamp, date_str, time_str, steps_str))
+
+ except ValueError:
+ pass
+
+ if len(hourly_data_list) > 0:
+ report = ArtifactHtmlReport('Fitbit - Hourly Steps')
+ report.start_artifact_report(report_folder, 'Fitbit - Hourly Steps')
+ report.add_script()
+
+ data_headers = ('Full Timestamp', 'Date', 'Time', 'Steps')
+
+ report.write_artifact_data_table(data_headers, hourly_data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, hourly_data_list, 'Fitbit - Hourly Steps')
+ timeline(report_folder, 'Fitbit - Hourly Steps', hourly_data_list, data_headers)
+ else:
+ logfunc('No Fitbit Hourly Steps found')
+
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Hourly Steps: {e}')
+
+ # -----------------------------------------------------------------------
+ # 5. Sleep Logs
+ # -----------------------------------------------------------------------
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Sleep Start",
+ datetime(endTime/1000, 'unixepoch') as "Sleep End",
+ dateOfSleep,
+ minutesAsleep,
+ minutesAwake,
+ minutesToFallAsleep,
+ minutesAfterWakeup,
+ type as "Sleep Type",
+ isMainSleep
+ FROM FitbitSleepDateEntity
+ ORDER BY startTime DESC
+ ''')
+
+ all_rows = cursor.fetchall()
+
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Sleep Logs')
+ report.start_artifact_report(report_folder, 'Fitbit - Sleep Logs')
+ report.add_script()
+
+ data_headers = ('Sleep Start', 'Sleep End', 'Date of Sleep', 'Mins Asleep', 'Mins Awake', 'Time to Fall Asleep', 'Time After Wakeup', 'Type', 'Is Main Sleep')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
+
+ report.write_artifact_data_table(data_headers, data_list, source_db)
+ report.end_artifact_report()
+
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep Logs')
+ timeline(report_folder, 'Fitbit - Sleep Logs', data_list, data_headers)
+ else:
+ logfunc('No Fitbit Sleep Logs found')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Sleep Logs: {e}')
+
+ db.close()
+ else:
+ logfunc('Fitbit user.db not found')
+
+__artifacts__ = {
+ "FitbitUserDb": (
+ "Fitbit",
+ ('*/databases/user.db'),
+ get_fitbit_user_db)
+}
\ No newline at end of file
From 6fdcbc9c697ecf23c3af24316f50122dff725f6f Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Mon, 12 Jan 2026 18:02:59 +0530
Subject: [PATCH 3/9] Update Fitbit passive stats module
- Replace the hardcoded exerciseTypeId-to-name mapping with the raw `Activity Type ID` value.
- Remove the Daily Stats (`PassiveDailyStatsEntity`) section and renumber the remaining sections.
- Bump artifact version to 1.0.2.
---
scripts/artifacts/fitbit_passive_stats.py | 66 +++--------------------
1 file changed, 8 insertions(+), 58 deletions(-)
diff --git a/scripts/artifacts/fitbit_passive_stats.py b/scripts/artifacts/fitbit_passive_stats.py
index 719a3744..6f407aea 100644
--- a/scripts/artifacts/fitbit_passive_stats.py
+++ b/scripts/artifacts/fitbit_passive_stats.py
@@ -1,7 +1,7 @@
# Module Description: Parses Fitbit Passive Stats DB from Wear OS
# Author: ganeshbs17
# Date: 2025-01-09
-# Artifact version: 1.0.1
+# Artifact version: 1.0.2
# Requirements: none
import sqlite3
@@ -23,22 +23,14 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
cursor = db.cursor()
# -----------------------------------------------------------------------
- # 1. Exercise Summaries (Workouts - The "Parent" Record)
+ # 1. Exercise Summaries
# -----------------------------------------------------------------------
try:
cursor.execute('''
SELECT
datetime(time/1000, 'unixepoch') as "Start Time",
sessionId,
- case exerciseTypeId
- when 0 then 'Unknown'
- when 39 then 'Running'
- when 8 then 'Cycling'
- when 79 then 'Walking'
- when 40 then 'Swimming'
- when 7 then 'Hiking'
- else exerciseTypeId
- end as "Activity Type",
+ exerciseTypeId as "Activity Type ID",
totalDistanceMm / 1000000.0 as "Distance (KM)",
steps,
caloriesBurned,
@@ -55,7 +47,7 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
report.start_artifact_report(report_folder, 'Fitbit - Workouts')
report.add_script()
- data_headers = ('Start Time', 'Session ID', 'Activity Type', 'Distance (KM)', 'Steps', 'Calories', 'Avg HR', 'Elevation (ft)')
+ data_headers = ('Start Time', 'Session ID', 'Activity Type ID', 'Distance (KM)', 'Steps', 'Calories', 'Avg HR', 'Elevation (ft)')
data_list = []
for row in all_rows:
data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
@@ -110,7 +102,7 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
logfunc(f'Error parsing Fitbit GPS: {e}')
# -----------------------------------------------------------------------
- # 3. Heart Rate Stats (Standard)
+ # 3. Heart Rate Stats
# -----------------------------------------------------------------------
try:
cursor.execute('''
@@ -255,49 +247,7 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
logfunc(f'Error parsing Fitbit AZM: {e}')
# -----------------------------------------------------------------------
- # 7. Daily Stats (Pattern of Life)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(dayStart/1000, 'unixepoch') as "Date",
- datetime(lastUpdated/1000, 'unixepoch') as "Last Updated",
- case statId
- when 1 then 'Steps'
- when 2 then 'Distance'
- when 3 then 'Calories'
- when 4 then 'Floors'
- else statId
- end as "Stat Type",
- value
- FROM PassiveDailyStatsEntity
- ORDER BY dayStart DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Daily Stats')
- report.start_artifact_report(report_folder, 'Fitbit - Daily Stats')
- report.add_script()
-
- data_headers = ('Date', 'Last Updated', 'Stat Type', 'Value')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Daily Stats')
- timeline(report_folder, 'Fitbit - Daily Stats', data_list, data_headers)
- else:
- logfunc('No Fitbit Daily Stats found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Daily Stats: {e}')
-
- # -----------------------------------------------------------------------
- # 8. Exercise Splits (Pace/Km)
+ # 7. Exercise Splits (Pace/Km)
# -----------------------------------------------------------------------
try:
cursor.execute('''
@@ -335,7 +285,7 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
logfunc(f'Error parsing Fitbit Splits: {e}')
# -----------------------------------------------------------------------
- # 9. Opaque Heart Rate (Raw Sensor Data)
+ # 8. Opaque Heart Rate (Raw Sensor Data)
# -----------------------------------------------------------------------
try:
cursor.execute('''
@@ -378,4 +328,4 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
"Fitbit",
('*/databases/passive_stats.db'),
get_fitbit_passive_stats)
-}
\ No newline at end of file
+}
From 3067179b54ad0a8d45bb7c5db1e6ac39ba8a341a Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Mon, 12 Jan 2026 20:24:47 +0530
Subject: [PATCH 4/9] Fix linter errors in Fitbit artifacts
---
scripts/artifacts/fitbitUserDb.py | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/scripts/artifacts/fitbitUserDb.py b/scripts/artifacts/fitbitUserDb.py
index 0e38fffd..c324a50c 100644
--- a/scripts/artifacts/fitbitUserDb.py
+++ b/scripts/artifacts/fitbitUserDb.py
@@ -4,13 +4,13 @@
# Artifact version: 1.0.4
# Requirements: none
-import sqlite3
import json
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
-def get_fitbit_user_db(files_found, report_folder, seeker, wrap_text):
+# pylint: disable=broad-exception-caught
+def get_fitbit_user_db(files_found, report_folder, _seeker, _wrap_text):
source_db = ''
for file_found in files_found:
@@ -247,4 +247,5 @@ def get_fitbit_user_db(files_found, report_folder, seeker, wrap_text):
"Fitbit",
('*/databases/user.db'),
get_fitbit_user_db)
-}
\ No newline at end of file
+
+}
From 2c97d58a72e2c0b29fb21251adfb136e34ef4956 Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Mon, 12 Jan 2026 20:27:42 +0530
Subject: [PATCH 5/9] Fix linter errors in Fitbit artifacts
---
scripts/artifacts/fitbit_passive_stats.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/scripts/artifacts/fitbit_passive_stats.py b/scripts/artifacts/fitbit_passive_stats.py
index 6f407aea..a0e0525f 100644
--- a/scripts/artifacts/fitbit_passive_stats.py
+++ b/scripts/artifacts/fitbit_passive_stats.py
@@ -4,12 +4,11 @@
# Artifact version: 1.0.2
# Requirements: none
-import sqlite3
-
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
-def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
+# pylint: disable=broad-exception-caught
+def get_fitbit_passive_stats(files_found, report_folder, _seeker, _wrap_text):
# Locate the specific database file
source_db = ''
@@ -329,3 +328,4 @@ def get_fitbit_passive_stats(files_found, report_folder, seeker, wrap_text):
('*/databases/passive_stats.db'),
get_fitbit_passive_stats)
}
+
From 4adeb05e68394e9366a77f5891f71bba19216d42 Mon Sep 17 00:00:00 2001
From: Ganesh <65601315+ganeshbs17@users.noreply.github.com>
Date: Tue, 13 Jan 2026 00:01:28 +0530
Subject: [PATCH 6/9] Merge Wear OS parsers into unified Fitbit script
- Consolidate Pixel Watch/Wear OS logic into a single function `get_fitbit_wearos` within `fitbit.py`.
- Register `FitbitWearOS` artifact in `__artifacts_v2__` to target both `user.db` and `passive_stats.db` (including WAL files via wildcard; see the matching sketch after this list).
- Preserve existing `get_fitbit` function for backward compatibility with Android smartphone extractions.
- Add granular reporting with forensic descriptions for all 13 Wear OS tables (User Profile, Heart Rate, GPS, AZM, etc.).
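
For reference, a small illustration of how the trailing `*` in the registered paths also picks up SQLite `-wal`/`-shm` sidecar files; the paths are hypothetical and fnmatch-style globbing is assumed only for demonstration (the framework's seeker may match differently):

    import fnmatch

    pattern = '*/com.fitbit.FitbitMobile/databases/passive_stats.db*'
    candidates = [
        '/data/data/com.fitbit.FitbitMobile/databases/passive_stats.db',
        '/data/data/com.fitbit.FitbitMobile/databases/passive_stats.db-wal',
        '/data/data/com.fitbit.FitbitMobile/databases/passive_stats.db-shm',
    ]
    print([p for p in candidates if fnmatch.fnmatch(p, pattern)])  # all three match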
---
scripts/artifacts/fitbit.py | 442 +++++++++++++++++++++-
scripts/artifacts/fitbitUserDb.py | 251 ------------
scripts/artifacts/fitbit_passive_stats.py | 331 ----------------
3 files changed, 429 insertions(+), 595 deletions(-)
delete mode 100644 scripts/artifacts/fitbitUserDb.py
delete mode 100644 scripts/artifacts/fitbit_passive_stats.py
diff --git a/scripts/artifacts/fitbit.py b/scripts/artifacts/fitbit.py
index fd661dd7..2e2ba5d8 100644
--- a/scripts/artifacts/fitbit.py
+++ b/scripts/artifacts/fitbit.py
@@ -1,26 +1,38 @@
+# Module Description: Parses Fitbit data from Android (Phone) and Wear OS (Watch)
+
+import json
+
+from datetime import datetime, timezone
+from scripts.artifact_report import ArtifactHtmlReport
+from scripts.ilapfuncs import logfunc, tsv, kmlgen, timeline, is_platform_windows, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
+
__artifacts_v2__ = {
"Fitbit": {
- "name": "Fitbit",
- "description": "Parses Fitbit activities",
+ "name": "Fitbit Smartphone Data",
+ "description": "Parses Fitbit activities from Android Smartphone app",
"author": "@AlexisBrignoni",
"version": "0.0.4",
"date": "2021-04-23",
"requirements": "none",
"category": "Fitbit",
- "notes": "Updated 2023-12-12 by @segumarc, wrong file_found was wrote in the 'located at' field in the html report",
+ "notes": "Updated 2023-12-12 by @segumarc",
"paths": ('*/com.fitbit.FitbitMobile/databases/activity_db*','*/com.fitbit.FitbitMobile/databases/device_database*','*/com.fitbit.FitbitMobile/databases/exercise_db*','*/com.fitbit.FitbitMobile/databases/heart_rate_db*','*/com.fitbit.FitbitMobile/databases/sleep*','*/com.fitbit.FitbitMobile/databases/social_db*','*/com.fitbit.FitbitMobile/databases/mobile_track_db*'),
"function": "get_fitbit"
+ },
+ "FitbitWearOS": {
+ "name": "Fitbit Wear OS Data",
+ "description": "Parses User DB and Passive Stats DB from Pixel Watch/Wear OS",
+ "author": "Ganeshbs17",
+ "version": "0.0.1",
+ "date": "2026-01-12",
+ "requirements": "none",
+ "category": "Fitbit",
+ "notes": "Specific to Pixel Watch/Wear OS",
+ "paths": ('*/com.fitbit.FitbitMobile/databases/user.db*', '*/com.fitbit.FitbitMobile/databases/passive_stats.db*'),
+ "function": "get_fitbit_wearos"
}
}
-import os
-import sqlite3
-import textwrap
-
-from datetime import datetime, timezone
-from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, kmlgen, timeline, is_platform_windows, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
-
def get_fitbit(files_found, report_folder, seeker, wrap_text):
data_list_activity = []
@@ -211,7 +223,7 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
start_time = convert_utc_human_to_timezone(convert_ts_human_to_utc(row[1]),'UTC')
data_list_sleep_summary.append((date_of_sleep,start_time,row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],file_found))
db.close()
-
+
if file_found.endswith('social_db'):
file_found_social = file_found
db = open_sqlite_db_readonly(file_found)
@@ -439,4 +451,408 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
else:
logfunc('No Fitbit Steps data available')
-
\ No newline at end of file
+
+# pylint: disable=broad-exception-caught
+def get_fitbit_wearos(files_found, report_folder, seeker, wrap_text):
+
+ for file_found in files_found:
+ file_found = str(file_found)
+
+ # --- PROCESS USER.DB ---
+ if file_found.endswith('user.db'):
+ db = open_sqlite_db_readonly(file_found)
+ cursor = db.cursor()
+
+ # 1. User Profile
+ try:
+ cursor.execute('''
+ SELECT
+ fullName,
+ displayName,
+ email,
+ gender,
+ dateOfBirth,
+ height,
+ weight,
+ memberSince,
+ userId
+ FROM FitbitProfileEntity
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - User Profile (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - User Profile (Wear OS)','User account details including Display Name, DOB, Gender, and Join Date. Parsed from FitbitProfileEntity table.')
+ report.add_script()
+ data_headers = ('Full Name', 'Display Name', 'Email', 'Gender', 'DOB', 'Height', 'Weight', 'Member Since', 'User ID')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - User Profile (Wear OS)')
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Profile: {e}')
+
+ # 2. Activity / Workout History
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ name as "Activity Name",
+ duration/1000/60 as "Duration (Mins)",
+ distance,
+ distanceUnit,
+ steps,
+ calories,
+ averageHeartRate,
+ elevationGain,
+ activeZoneMinutes,
+ logId
+ FROM ActivityExerciseEntity
+ ORDER BY startTime DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Activity History (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Activity History (Wear OS)', 'Log of exercises and activities including duration, distance, and calories. Parsed from ActivityExerciseEntity table.')
+ report.add_script()
+ data_headers = ('Start Time', 'Activity Name', 'Duration (Mins)', 'Distance', 'Unit', 'Steps', 'Calories', 'Avg HR', 'Elevation', 'AZM', 'Log ID')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Activity History (Wear OS)')
+ timeline(report_folder, 'Fitbit - Activity History (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Activity History: {e}')
+
+ # 3. DAILY Summaries
+ try:
+ cursor.execute('''
+ SELECT
+ date,
+ totalMinutesMoving,
+ totalMinutesSedentary,
+ longestDuration as "Longest Sedentary Duration",
+ longestStart as "Longest Sedentary Start Time"
+ FROM SedentaryDataEntity
+ ORDER BY date DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Daily Activity (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Daily Activity (Wear OS)','Daily summary of total minutes moved vs. sedentary minutes. Parsed from SedentaryDataEntity table.')
+ report.add_script()
+ data_headers = ('Date', 'Total Moving Mins', 'Total Sedentary Mins', 'Longest Sedentary Duration (Mins)', 'Longest Sedentary Start Time')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Daily Activity (Wear OS)')
+ timeline(report_folder, 'Fitbit - Daily Activity (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Daily Summaries: {e}')
+
+ # 4. HOURLY Steps (Flattened JSON)
+ try:
+ cursor.execute('''
+ SELECT
+ date,
+ hourlyData
+ FROM SedentaryDataEntity
+ ORDER BY date DESC
+ ''')
+ all_rows = cursor.fetchall()
+ hourly_data_list = []
+ for row in all_rows:
+ date_str = row[0]
+ json_data = row[1]
+ if json_data:
+ try:
+ parsed_json = json.loads(json_data)
+ hourly_list = parsed_json.get('hourlyData', [])
+ for hour_entry in hourly_list:
+ time_str = hour_entry.get('dateTime', '')
+ steps_str = hour_entry.get('steps', '0')
+ full_timestamp = f"{date_str} {time_str}"
+ hourly_data_list.append((full_timestamp, date_str, time_str, steps_str))
+ except ValueError:
+ pass
+ if len(hourly_data_list) > 0:
+ report = ArtifactHtmlReport('Fitbit - Hourly Steps (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Hourly Steps (Wear OS)','Granular hourly step counts parsed from JSON blobs stored in the SedentaryDataEntity table.')
+ report.add_script()
+ data_headers = ('Full Timestamp', 'Date', 'Time', 'Steps')
+ report.write_artifact_data_table(data_headers, hourly_data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, hourly_data_list, 'Fitbit - Hourly Steps (Wear OS)')
+ timeline(report_folder, 'Fitbit - Hourly Steps (Wear OS)', hourly_data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Hourly Steps: {e}')
+
+ # 5. Sleep Logs
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Sleep Start",
+ datetime(endTime/1000, 'unixepoch') as "Sleep End",
+ dateOfSleep,
+ minutesAsleep,
+ minutesAwake,
+ minutesToFallAsleep,
+ minutesAfterWakeup,
+ type as "Sleep Type",
+ isMainSleep
+ FROM FitbitSleepDateEntity
+ ORDER BY startTime DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Sleep Logs (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Sleep Logs (Wear OS)','Main sleep session logs including start/end times and sleep stages. Parsed from FitbitSleepDateEntity table.')
+ report.add_script()
+ data_headers = ('Sleep Start', 'Sleep End', 'Date of Sleep', 'Mins Asleep', 'Mins Awake', 'Time to Fall Asleep', 'Time After Wakeup', 'Type', 'Is Main Sleep')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep Logs (Wear OS)')
+ timeline(report_folder, 'Fitbit - Sleep Logs (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Sleep Logs: {e}')
+
+ db.close()
+
+ # --- PROCESS PASSIVE_STATS.DB ---
+ elif file_found.endswith('passive_stats.db'):
+ db = open_sqlite_db_readonly(file_found)
+ cursor = db.cursor()
+
+ # 1. Exercise Summaries
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Start Time",
+ sessionId,
+ exerciseTypeId as "Activity Type ID",
+ totalDistanceMm / 1000000.0 as "Distance (KM)",
+ steps,
+ caloriesBurned,
+ avgHeartRate,
+ elevationGainFt
+ FROM ExerciseSummaryEntity
+ ORDER BY time DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Workouts (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Workouts (Wear OS)','Workout summaries including steps, calories etc. Parsed from ExerciseSummaryEntity table.')
+ report.add_script()
+ data_headers = ('Start Time', 'Session ID', 'Activity Type ID', 'Distance (KM)', 'Steps', 'Calories', 'Avg HR', 'Elevation (ft)')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Workouts (Wear OS)')
+ timeline(report_folder, 'Fitbit - Workouts (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Workouts: {e}')
+
+ # 2. Exercise GPS
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Timestamp",
+ latitude,
+ longitude,
+ altitude,
+ speed,
+ bearing,
+ estimatedPositionError
+ FROM ExerciseGpsEntity
+ ORDER BY time ASC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - GPS Trackpoints (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - GPS Trackpoints (Wear OS)','GPS trackpoints recorded during exercises. Parsed from ExerciseGpsEntity table.')
+ report.add_script()
+ data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Altitude', 'Speed', 'Bearing', 'Est. Error')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - GPS Trackpoints (Wear OS)')
+ timeline(report_folder, 'Fitbit - GPS Trackpoints (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit GPS: {e}')
+
+ # 3. Heart Rate Stats
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ datetime(endTime/1000, 'unixepoch') as "End Time",
+ value as "BPM",
+ accuracy
+ FROM HeartRateStatEntity
+ ORDER BY startTime DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Heart Rate Stats (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Heart Rate Stats (Wear OS)','Heart rate statistics (BPM). Parsed from HeartRateStatEntity table.')
+ report.add_script()
+ data_headers = ('Start Time', 'End Time', 'BPM', 'Accuracy')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Heart Rate Stats (Wear OS)')
+ timeline(report_folder, 'Fitbit - Heart Rate Stats (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit HR Stats: {e}')
+
+ # 4. Live Pace
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(timeSeconds/1000, 'unixepoch') as "Timestamp",
+ sessionId,
+ statType,
+ value
+ FROM LivePaceEntity
+ ORDER BY timeSeconds DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Live Pace (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Live Pace (Wear OS)','Live pace statistics during workouts. Parsed from LivePaceEntity table.')
+ report.add_script()
+ data_headers = ('Timestamp', 'Session ID', 'Stat Type', 'Value')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Live Pace (Wear OS)')
+ timeline(report_folder, 'Fitbit - Live Pace (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Live Pace: {e}')
+
+ # 5. Sleep Periods
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(sleepStartTime/1000, 'unixepoch') as "Sleep Start",
+ datetime(sleepEndTime/1000, 'unixepoch') as "Sleep End",
+ (sleepEndTime - sleepStartTime)/1000/60 as "Duration (Mins)"
+ FROM LocalSleepPeriodsEntity
+ ORDER BY sleepStartTime DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Sleep (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Sleep (Wear OS)', 'Raw sleep periods detected by device. Parsed from LocalSleepPeriodsEntity table.')
+ report.add_script()
+ data_headers = ('Sleep Start', 'Sleep End', 'Duration (Mins)')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep (Wear OS)')
+ timeline(report_folder, 'Fitbit - Sleep (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Sleep: {e}')
+
+ # 6. Active Zone Minutes
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(startTime/1000, 'unixepoch') as "Start Time",
+ datetime(endTime/1000, 'unixepoch') as "End Time",
+ activeZone,
+ value as "Points",
+ lastBpm
+ FROM PassiveAzmEntity
+ ORDER BY startTime DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Active Zones (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Active Zones (Wear OS)','Minutes spent in elevated heart rate zones (Fat Burn = 1x, Cardio/Peak = 2x). Indicates physical exertion intensity. Parsed from PassiveAzmEntity table.')
+ report.add_script()
+ data_headers = ('Start Time', 'End Time', 'Zone ID', 'Points', 'Last BPM')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Active Zones (Wear OS)')
+ timeline(report_folder, 'Fitbit - Active Zones (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit AZM: {e}')
+
+ # 7. Exercise Splits
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(time/1000, 'unixepoch') as "Split Time",
+ sessionId,
+ avgPaceMilliSecPerKm / 1000 / 60.0 as "Avg Pace (Min/Km)",
+ avgHeartRate,
+ steps,
+ caloriesBurned
+ FROM ExerciseSplitAnnotationEntity
+ ORDER BY time ASC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Workout Splits (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Workout Splits (Wear OS)','Performance metrics (Pace, HR, Steps). Parsed from ExerciseSplitAnnotationEntity table.')
+ report.add_script()
+ data_headers = ('Split Time', 'Session ID', 'Avg Pace (Min/Km)', 'Avg HR', 'Steps', 'Calories')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Workout Splits (Wear OS)')
+ timeline(report_folder, 'Fitbit - Workout Splits (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Splits: {e}')
+
+ # 8. Opaque HR
+ try:
+ cursor.execute('''
+ SELECT
+ datetime(timestamp/1000, 'unixepoch') as "Timestamp",
+ baseHeartRate as "Base HR",
+ confidence as "Confidence (0-3)"
+ FROM OpaqueHeartRateEntity
+ ORDER BY timestamp DESC
+ ''')
+ all_rows = cursor.fetchall()
+ if len(all_rows) > 0:
+ report = ArtifactHtmlReport('Fitbit - Opaque HR (Wear OS)')
+ report.start_artifact_report(report_folder, 'Fitbit - Opaque HR (Wear OS)','Raw heart rate sensor readings. Parsed from OpaqueHeartRateEntity table.')
+ report.add_script()
+ data_headers = ('Timestamp', 'Base HR', 'Confidence')
+ data_list = []
+ for row in all_rows:
+ data_list.append((row[0], row[1], row[2]))
+ report.write_artifact_data_table(data_headers, data_list, file_found)
+ report.end_artifact_report()
+ tsv(report_folder, data_headers, data_list, 'Fitbit - Opaque HR (Wear OS)')
+ timeline(report_folder, 'Fitbit - Opaque HR (Wear OS)', data_list, data_headers)
+ except Exception as e:
+ logfunc(f'Error parsing Fitbit Opaque HR: {e}')
+
+ db.close()
\ No newline at end of file
diff --git a/scripts/artifacts/fitbitUserDb.py b/scripts/artifacts/fitbitUserDb.py
deleted file mode 100644
index c324a50c..00000000
--- a/scripts/artifacts/fitbitUserDb.py
+++ /dev/null
@@ -1,251 +0,0 @@
-# Module Description: Parses Fitbit User DB from Wear OS devices to extract user profile, activity history, daily summaries, hourly steps, and sleep logs.
-# Author: ganeshbs17
-# Date: 2025-01-09
-# Artifact version: 1.0.4
-# Requirements: none
-
-import json
-
-from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
-
-# pylint: disable=broad-exception-caught
-def get_fitbit_user_db(files_found, report_folder, _seeker, _wrap_text):
-
- source_db = ''
- for file_found in files_found:
- if file_found.endswith('user.db'):
- source_db = file_found
- break
-
- if source_db:
- db = open_sqlite_db_readonly(source_db)
- cursor = db.cursor()
-
- # -----------------------------------------------------------------------
- # 1. User Profile
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- fullName,
- displayName,
- email,
- gender,
- dateOfBirth,
- height,
- weight,
- memberSince,
- userId
- FROM FitbitProfileEntity
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - User Profile')
- report.start_artifact_report(report_folder, 'Fitbit - User Profile')
- report.add_script()
-
- data_headers = ('Full Name', 'Display Name', 'Email', 'Gender', 'DOB', 'Height', 'Weight', 'Member Since', 'User ID')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - User Profile')
- else:
- logfunc('No Fitbit User Profile found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Profile: {e}')
-
- # -----------------------------------------------------------------------
- # 2. Activity / Workout History
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(startTime/1000, 'unixepoch') as "Start Time",
- name as "Activity Name",
- duration/1000/60 as "Duration (Mins)",
- distance,
- distanceUnit,
- steps,
- calories,
- averageHeartRate,
- elevationGain,
- activeZoneMinutes,
- logId
- FROM ActivityExerciseEntity
- ORDER BY startTime DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Activity History')
- report.start_artifact_report(report_folder, 'Fitbit - Activity History')
- report.add_script()
-
- data_headers = ('Start Time', 'Activity Name', 'Duration (Mins)', 'Distance', 'Unit', 'Steps', 'Calories', 'Avg HR', 'Elevation', 'AZM', 'Log ID')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Activity History')
- timeline(report_folder, 'Fitbit - Activity History', data_list, data_headers)
- else:
- logfunc('No Fitbit Activity History found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Activity History: {e}')
-
- # -----------------------------------------------------------------------
- # 3. DAILY Summaries (One row per day)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- date,
- totalMinutesMoving,
- totalMinutesSedentary,
- longestDuration as "Longest Sedentary Duration",
- longestStart as "Longest Sedentary Start Time"
- FROM SedentaryDataEntity
- ORDER BY date DESC
- ''')
-
- daily_rows = cursor.fetchall()
-
- if len(daily_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Daily Activity Summary')
- report.start_artifact_report(report_folder, 'Fitbit - Daily Activity Summary')
- report.add_script()
-
- data_headers = ('Date', 'Total Moving Mins', 'Total Sedentary Mins', 'Longest Sedentary Duration (Mins)', 'Longest Sedentary Start Time')
- data_list = []
- for row in daily_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Daily Activity Summary')
- # Timeline optional for Daily stats, but good for overview
- timeline(report_folder, 'Fitbit - Daily Activity Summary', data_list, data_headers)
- else:
- logfunc('No Fitbit Daily Summaries found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Daily Summaries: {e}')
-
- # -----------------------------------------------------------------------
- # 4. HOURLY Steps (Flattened JSON)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- date,
- hourlyData
- FROM SedentaryDataEntity
- ORDER BY date DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- hourly_data_list = []
-
- for row in all_rows:
- date_str = row[0]
- json_data = row[1]
-
- if json_data:
- try:
- parsed_json = json.loads(json_data)
- hourly_list = parsed_json.get('hourlyData', [])
-
- for hour_entry in hourly_list:
- time_str = hour_entry.get('dateTime', '')
- steps_str = hour_entry.get('steps', '0')
-
- # Create a full timestamp for the timeline
- full_timestamp = f"{date_str} {time_str}"
-
- hourly_data_list.append((full_timestamp, date_str, time_str, steps_str))
-
- except ValueError:
- pass
-
- if len(hourly_data_list) > 0:
- report = ArtifactHtmlReport('Fitbit - Hourly Steps')
- report.start_artifact_report(report_folder, 'Fitbit - Hourly Steps')
- report.add_script()
-
- data_headers = ('Full Timestamp', 'Date', 'Time', 'Steps')
-
- report.write_artifact_data_table(data_headers, hourly_data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, hourly_data_list, 'Fitbit - Hourly Steps')
- timeline(report_folder, 'Fitbit - Hourly Steps', hourly_data_list, data_headers)
- else:
- logfunc('No Fitbit Hourly Steps found')
-
- except Exception as e:
- logfunc(f'Error parsing Fitbit Hourly Steps: {e}')
-
- # -----------------------------------------------------------------------
- # 5. Sleep Logs
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(startTime/1000, 'unixepoch') as "Sleep Start",
- datetime(endTime/1000, 'unixepoch') as "Sleep End",
- dateOfSleep,
- minutesAsleep,
- minutesAwake,
- minutesToFallAsleep,
- minutesAfterWakeup,
- type as "Sleep Type",
- isMainSleep
- FROM FitbitSleepDateEntity
- ORDER BY startTime DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Sleep Logs')
- report.start_artifact_report(report_folder, 'Fitbit - Sleep Logs')
- report.add_script()
-
- data_headers = ('Sleep Start', 'Sleep End', 'Date of Sleep', 'Mins Asleep', 'Mins Awake', 'Time to Fall Asleep', 'Time After Wakeup', 'Type', 'Is Main Sleep')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep Logs')
- timeline(report_folder, 'Fitbit - Sleep Logs', data_list, data_headers)
- else:
- logfunc('No Fitbit Sleep Logs found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Sleep Logs: {e}')
-
- db.close()
- else:
- logfunc('Fitbit user.db not found')
-
-__artifacts__ = {
- "FitbitUserDb": (
- "Fitbit",
- ('*/databases/user.db'),
- get_fitbit_user_db)
-
-}
diff --git a/scripts/artifacts/fitbit_passive_stats.py b/scripts/artifacts/fitbit_passive_stats.py
deleted file mode 100644
index a0e0525f..00000000
--- a/scripts/artifacts/fitbit_passive_stats.py
+++ /dev/null
@@ -1,331 +0,0 @@
-# Module Description: Parses Fitbit Passive Stats DB from Wear OS
-# Author: ganeshbs17
-# Date: 2025-01-09
-# Artifact version: 1.0.2
-# Requirements: none
-
-from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
-
-# pylint: disable=broad-exception-caught
-def get_fitbit_passive_stats(files_found, report_folder, _seeker, _wrap_text):
-
- # Locate the specific database file
- source_db = ''
- for file_found in files_found:
- if file_found.endswith('passive_stats.db'):
- source_db = file_found
- break
-
- if source_db:
- db = open_sqlite_db_readonly(source_db)
- cursor = db.cursor()
-
- # -----------------------------------------------------------------------
- # 1. Exercise Summaries
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(time/1000, 'unixepoch') as "Start Time",
- sessionId,
- exerciseTypeId as "Activity Type ID",
- totalDistanceMm / 1000000.0 as "Distance (KM)",
- steps,
- caloriesBurned,
- avgHeartRate,
- elevationGainFt
- FROM ExerciseSummaryEntity
- ORDER BY time DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Workouts')
- report.start_artifact_report(report_folder, 'Fitbit - Workouts')
- report.add_script()
-
- data_headers = ('Start Time', 'Session ID', 'Activity Type ID', 'Distance (KM)', 'Steps', 'Calories', 'Avg HR', 'Elevation (ft)')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Workouts')
- timeline(report_folder, 'Fitbit - Workouts', data_list, data_headers)
- else:
- logfunc('No Fitbit Workout Summaries found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Workouts: {e}')
-
- # -----------------------------------------------------------------------
- # 2. Exercise GPS Data (Track Points)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(time/1000, 'unixepoch') as "Timestamp",
- latitude,
- longitude,
- altitude,
- speed,
- bearing,
- estimatedPositionError
- FROM ExerciseGpsEntity
- ORDER BY time ASC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - GPS Trackpoints')
- report.start_artifact_report(report_folder, 'Fitbit - GPS Trackpoints')
- report.add_script()
-
- data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Altitude', 'Speed', 'Bearing', 'Est. Error')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - GPS Trackpoints')
- timeline(report_folder, 'Fitbit - GPS Trackpoints', data_list, data_headers)
- else:
- logfunc('No Fitbit GPS data found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit GPS: {e}')
-
- # -----------------------------------------------------------------------
- # 3. Heart Rate Stats
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(startTime/1000, 'unixepoch') as "Start Time",
- datetime(endTime/1000, 'unixepoch') as "End Time",
- value as "BPM",
- accuracy
- FROM HeartRateStatEntity
- ORDER BY startTime DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Heart Rate Stats')
- report.start_artifact_report(report_folder, 'Fitbit - Heart Rate Stats')
- report.add_script()
-
- data_headers = ('Start Time', 'End Time', 'BPM', 'Accuracy')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Heart Rate Stats')
- timeline(report_folder, 'Fitbit - Heart Rate Stats', data_list, data_headers)
- else:
- logfunc('No Fitbit Heart Rate Stats found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit HR Stats: {e}')
-
- # -----------------------------------------------------------------------
- # 4. Live Pace
- # -----------------------------------------------------------------------
- try:
- # Note: timeSeconds column contains milliseconds in newer app versions
- cursor.execute('''
- SELECT
- datetime(timeSeconds/1000, 'unixepoch') as "Timestamp",
- sessionId,
- statType,
- value
- FROM LivePaceEntity
- ORDER BY timeSeconds DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Live Pace')
- report.start_artifact_report(report_folder, 'Fitbit - Live Pace')
- report.add_script()
-
- data_headers = ('Timestamp', 'Session ID', 'Stat Type', 'Value')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Live Pace')
- timeline(report_folder, 'Fitbit - Live Pace', data_list, data_headers)
- else:
- logfunc('No Fitbit Live Pace data found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Live Pace: {e}')
-
- # -----------------------------------------------------------------------
- # 5. Sleep Periods
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(sleepStartTime/1000, 'unixepoch') as "Sleep Start",
- datetime(sleepEndTime/1000, 'unixepoch') as "Sleep End",
- (sleepEndTime - sleepStartTime)/1000/60 as "Duration (Mins)"
- FROM LocalSleepPeriodsEntity
- ORDER BY sleepStartTime DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Sleep')
- report.start_artifact_report(report_folder, 'Fitbit - Sleep')
- report.add_script()
-
- data_headers = ('Sleep Start', 'Sleep End', 'Duration (Mins)')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Sleep')
- timeline(report_folder, 'Fitbit - Sleep', data_list, data_headers)
- else:
- logfunc('No Fitbit Sleep data found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Sleep: {e}')
-
- # -----------------------------------------------------------------------
- # 6. Active Zone Minutes (AZM)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(startTime/1000, 'unixepoch') as "Start Time",
- datetime(endTime/1000, 'unixepoch') as "End Time",
- activeZone,
- value as "Points",
- lastBpm
- FROM PassiveAzmEntity
- ORDER BY startTime DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Active Zones')
- report.start_artifact_report(report_folder, 'Fitbit - Active Zones')
- report.add_script()
-
- data_headers = ('Start Time', 'End Time', 'Zone ID', 'Points', 'Last BPM')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Active Zones')
- timeline(report_folder, 'Fitbit - Active Zones', data_list, data_headers)
- else:
- logfunc('No Fitbit AZM data found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit AZM: {e}')
-
- # -----------------------------------------------------------------------
- # 7. Exercise Splits (Pace/Km)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(time/1000, 'unixepoch') as "Split Time",
- sessionId,
- avgPaceMilliSecPerKm / 1000 / 60.0 as "Avg Pace (Min/Km)",
- avgHeartRate,
- steps,
- caloriesBurned
- FROM ExerciseSplitAnnotationEntity
- ORDER BY time ASC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Workout Splits')
- report.start_artifact_report(report_folder, 'Fitbit - Workout Splits')
- report.add_script()
-
- data_headers = ('Split Time', 'Session ID', 'Avg Pace (Min/Km)', 'Avg HR', 'Steps', 'Calories')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Workout Splits')
- timeline(report_folder, 'Fitbit - Workout Splits', data_list, data_headers)
- else:
- logfunc('No Fitbit Splits found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Splits: {e}')
-
- # -----------------------------------------------------------------------
- # 8. Opaque Heart Rate (Raw Sensor Data)
- # -----------------------------------------------------------------------
- try:
- cursor.execute('''
- SELECT
- datetime(timestamp/1000, 'unixepoch') as "Timestamp",
- baseHeartRate as "Base HR",
- confidence as "Confidence (0-3)"
- FROM OpaqueHeartRateEntity
- ORDER BY timestamp DESC
- ''')
-
- all_rows = cursor.fetchall()
-
- if len(all_rows) > 0:
- report = ArtifactHtmlReport('Fitbit - Opaque HR')
- report.start_artifact_report(report_folder, 'Fitbit - Opaque HR')
- report.add_script()
-
- data_headers = ('Timestamp', 'Base HR', 'Confidence')
- data_list = []
- for row in all_rows:
- data_list.append((row[0], row[1], row[2]))
-
- report.write_artifact_data_table(data_headers, data_list, source_db)
- report.end_artifact_report()
-
- tsv(report_folder, data_headers, data_list, 'Fitbit - Opaque HR')
- timeline(report_folder, 'Fitbit - Opaque HR', data_list, data_headers)
- else:
- logfunc('No Fitbit Opaque HR data found')
- except Exception as e:
- logfunc(f'Error parsing Fitbit Opaque HR: {e}')
-
- db.close()
- else:
- logfunc('Fitbit passive_stats.db not found')
-
-__artifacts__ = {
- "FitbitPassiveStats": (
- "Fitbit",
- ('*/databases/passive_stats.db'),
- get_fitbit_passive_stats)
-}
-
From ca6f7d5f8619a5b1e542795bd7d4a8117634d22c Mon Sep 17 00:00:00 2001
From: Ganesh <65601315+ganeshbs17@users.noreply.github.com>
Date: Tue, 13 Jan 2026 17:38:11 +0530
Subject: [PATCH 7/9] Add map visualization to fitbit.py
Add an interactive map visualization of the GPS trackpoints (via folium) to the Wear OS report.
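For reference, a minimal standalone sketch of the same folium approach (assumes the folium package is installed; the coordinates below are placeholder values, not from any extraction):

    import folium

    # Placeholder (lat, lon) pairs standing in for ExerciseGpsEntity rows.
    points = [(12.9716, 77.5946), (12.9721, 77.5952), (12.9727, 77.5960)]

    # Center the map on the first trackpoint and draw the route.
    m = folium.Map(location=points[0], zoom_start=13, tiles='OpenStreetMap')
    folium.PolyLine(points, color='red', weight=2.5, opacity=1).add_to(m)

    # Mark where the route starts and ends.
    folium.Marker(points[0], popup='Start', icon=folium.Icon(color='green', icon='play')).add_to(m)
    folium.Marker(points[-1], popup='End', icon=folium.Icon(color='red', icon='stop')).add_to(m)

    # Write a self-contained HTML map that can be opened in any browser.
    m.save('Fitbit_GPS_Map.html')

Because the generated HTML is self-contained, saving it into the report folder keeps the map portable alongside the rest of the report output.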
---
scripts/artifacts/fitbit.py | 51 ++++++++++++++++++++++++++++++++++---
1 file changed, 48 insertions(+), 3 deletions(-)
diff --git a/scripts/artifacts/fitbit.py b/scripts/artifacts/fitbit.py
index 2e2ba5d8..4f75e1dc 100644
--- a/scripts/artifacts/fitbit.py
+++ b/scripts/artifacts/fitbit.py
@@ -1,6 +1,8 @@
# Module Description: Parses Fitbit data from Android (Phone) and Wear OS (Watch)
import json
+import folium
+import os
from datetime import datetime, timezone
from scripts.artifact_report import ArtifactHtmlReport
@@ -678,17 +680,60 @@ def get_fitbit_wearos(files_found, report_folder, seeker, wrap_text):
''')
all_rows = cursor.fetchall()
if len(all_rows) > 0:
+ # 1. Generate the standard text report first
report = ArtifactHtmlReport('Fitbit - GPS Trackpoints (Wear OS)')
- report.start_artifact_report(report_folder, 'Fitbit - GPS Trackpoints (Wear OS)','GPS trackpoints recorded during exercises. Parsed from ExerciseGpsEntity table.')
+            report.start_artifact_report(report_folder, 'Fitbit - GPS Trackpoints (Wear OS)', 'GPS trackpoints recorded during exercises (ExerciseGpsEntity). An interactive map preview is embedded at the bottom of this report.')
report.add_script()
- data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Altitude', 'Speed', 'Bearing', 'Est. Error')
+
+ data_headers = ('Timestamp', 'Latitude', 'Longitude', 'Altitude', 'Speed', 'Est. Error')
data_list = []
+ points = []
+
for row in all_rows:
- data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6]))
+ # Add to text report
+ data_list.append((row[0], row[1], row[2], row[3], row[4], row[6]))
+
+                    # Collect map points, skipping null or 0.0 coordinates
+ if row[1] and row[2]:
+ points.append((row[1], row[2]))
+
+ # ---------------------------------------------------------
+ # MAP GENERATION (Folium)
+ # ---------------------------------------------------------
+ if len(points) > 0:
+ try:
+ # Center map on the first point
+ m = folium.Map(location=points[0], zoom_start=13, tiles='OpenStreetMap')
+
+ # Add the route line
+ folium.PolyLine(points, color="red", weight=2.5, opacity=1).add_to(m)
+
+ # Add Start/End markers
+ folium.Marker(points[0], popup='Start', icon=folium.Icon(color='green', icon='play')).add_to(m)
+ folium.Marker(points[-1], popup='End', icon=folium.Icon(color='red', icon='stop')).add_to(m)
+
+ # Save HTML map to the report folder
+ map_filename = 'Fitbit_GPS_Map.html'
+ map_path = os.path.join(report_folder, map_filename)
+ m.save(map_path)
+
+ logfunc(f'Map generated: {map_path}')
+ except Exception as e:
+                    logfunc(f'Error generating map: {e}')
+ # ---------------------------------------------------------
+
report.write_artifact_data_table(data_headers, data_list, file_found)
+
+ # --- START: INJECT IFRAME AT BOTTOM ---
+ if len(points) > 0:
+ report.add_section_heading('Interactive Map Preview')
+ report.add_map(f'')
+ # --- END: INJECT IFRAME AT BOTTOM ---
+
report.end_artifact_report()
tsv(report_folder, data_headers, data_list, 'Fitbit - GPS Trackpoints (Wear OS)')
timeline(report_folder, 'Fitbit - GPS Trackpoints (Wear OS)', data_list, data_headers)
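+            # kmlgen is expected to pick up coordinates from the 'Latitude'/'Longitude' headers (assumption, mirroring other ALEAPP location artifacts).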
+ kmlgen(report_folder, 'Fitbit_GPS_WearOS', data_list, data_headers)
except Exception as e:
logfunc(f'Error parsing Fitbit GPS: {e}')
From 47e9cf3e9e90bb7ee3289c2228916296458db4d9 Mon Sep 17 00:00:00 2001
From: Ganesh Savant <65601315+ganeshbs17@users.noreply.github.com>
Date: Tue, 13 Jan 2026 20:29:50 +0530
Subject: [PATCH 8/9] Add Pylint workflow for Python code analysis
---
.github/workflows/pylint.yml | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+)
create mode 100644 .github/workflows/pylint.yml
diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
new file mode 100644
index 00000000..c73e032c
--- /dev/null
+++ b/.github/workflows/pylint.yml
@@ -0,0 +1,23 @@
+name: Pylint
+
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.8", "3.9", "3.10"]
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install pylint
+ - name: Analysing the code with pylint
+ run: |
+ pylint $(git ls-files '*.py')
From 78c53d1ab09a0215ab1fc605e57a6bc1c0631860 Mon Sep 17 00:00:00 2001
From: Ganesh <65601315+ganeshbs17@users.noreply.github.com>
Date: Tue, 13 Jan 2026 21:32:53 +0530
Subject: [PATCH 9/9] Fix lint errors and remove the Pylint workflow
---
.github/workflows/pylint.yml | 23 ----------------
scripts/artifacts/fitbit.py | 51 ++++++++++++++++++++----------------
2 files changed, 29 insertions(+), 45 deletions(-)
delete mode 100644 .github/workflows/pylint.yml
diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
deleted file mode 100644
index c73e032c..00000000
--- a/.github/workflows/pylint.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Pylint
-
-on: [push]
-
-jobs:
- build:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python-version: ["3.8", "3.9", "3.10"]
- steps:
- - uses: actions/checkout@v4
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
- with:
- python-version: ${{ matrix.python-version }}
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install pylint
- - name: Analysing the code with pylint
- run: |
- pylint $(git ls-files '*.py')
diff --git a/scripts/artifacts/fitbit.py b/scripts/artifacts/fitbit.py
index 4f75e1dc..685b9576 100644
--- a/scripts/artifacts/fitbit.py
+++ b/scripts/artifacts/fitbit.py
@@ -4,9 +4,8 @@
import folium
import os
-from datetime import datetime, timezone
from scripts.artifact_report import ArtifactHtmlReport
-from scripts.ilapfuncs import logfunc, tsv, kmlgen, timeline, is_platform_windows, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
+from scripts.ilapfuncs import logfunc, tsv, kmlgen, timeline, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
__artifacts_v2__ = {
"Fitbit": {
@@ -35,8 +34,16 @@
}
}
-def get_fitbit(files_found, report_folder, seeker, wrap_text):
+def get_fitbit(files_found, report_folder, _seeker, _wrap_text):
+ file_found_activity = ''
+ file_found_device = ''
+ file_found_exercise = ''
+ file_found_heart = ''
+ file_found_sleep = ''
+ file_found_social = ''
+ file_found_mobile = ''
+
data_list_activity = []
data_list_devices = []
data_list_exercises = []
@@ -313,10 +320,10 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_activity, file_found_activity)
report.end_artifact_report()
- tsvname = f'Fitbit Activity'
+ tsvname = 'Fitbit Activity'
tsv(report_folder, data_headers, data_list_activity, tsvname)
- tlactivity = f'Fitbit Activity'
+ tlactivity = 'Fitbit Activity'
timeline(report_folder, tlactivity, data_list_activity, data_headers)
else:
logfunc('No Fitbit Activity data available')
@@ -330,10 +337,10 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_devices, file_found_device)
report.end_artifact_report()
- tsvname = f'Fitbit Device Info'
+ tsvname = 'Fitbit Device Info'
tsv(report_folder, data_headers, data_list_devices, tsvname)
- tlactivity = f'Fitbit Device Info'
+ tlactivity = 'Fitbit Device Info'
timeline(report_folder, tlactivity, data_list_devices, data_headers)
else:
logfunc('No Fitbit Device Info data available')
@@ -347,13 +354,13 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_exercises, file_found_exercise)
report.end_artifact_report()
- tsvname = f'Fitbit Exercise'
+ tsvname = 'Fitbit Exercise'
tsv(report_folder, data_headers, data_list_exercises, tsvname)
- tlactivity = f'Fitbit Exercise'
+ tlactivity = 'Fitbit Exercise'
timeline(report_folder, tlactivity, data_list_exercises, data_headers)
else:
- logfunc(f'No Fitbit - Exercise data available')
+ logfunc('No Fitbit - Exercise data available')
if data_list_heart:
report = ArtifactHtmlReport('Fitbit Heart Rate Summary')
@@ -364,10 +371,10 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_heart, file_found_heart)
report.end_artifact_report()
- tsvname = f'Fitbit Heart Rate Summary'
+ tsvname = 'Fitbit Heart Rate Summary'
tsv(report_folder, data_headers, data_list_heart, tsvname)
- tlactivity = f'Fitbit Heart Rate Summary'
+ tlactivity = 'Fitbit Heart Rate Summary'
timeline(report_folder, tlactivity, data_list_heart, data_headers)
else:
logfunc('No Fitbit Heart Rate Summary data available')
@@ -381,10 +388,10 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_sleep_detail, file_found_sleep)
report.end_artifact_report()
- tsvname = f'Fitbit Sleep Detail'
+ tsvname = 'Fitbit Sleep Detail'
tsv(report_folder, data_headers, data_list_sleep_detail, tsvname)
- tlactivity = f'Fitbit Sleep Detail'
+ tlactivity = 'Fitbit Sleep Detail'
timeline(report_folder, tlactivity, data_list_sleep_detail, data_headers)
else:
logfunc('No Fitbit Sleep Detail data available')
@@ -398,7 +405,7 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_sleep_summary, file_found_sleep)
report.end_artifact_report()
- tsvname = f'Fitbit Sleep Summary'
+ tsvname = 'Fitbit Sleep Summary'
tsv(report_folder, data_headers, data_list_sleep_summary, tsvname)
else:
logfunc('No Fitbit Sleep Summary data available')
@@ -412,7 +419,7 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_friends, file_found_social)
report.end_artifact_report()
- tsvname = f'Fitbit Friends'
+ tsvname = 'Fitbit Friends'
tsv(report_folder, data_headers, data_list_friends, tsvname)
else:
@@ -427,10 +434,10 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_user, file_found_social)
report.end_artifact_report()
- tsvname = f'Fitbit User Profile'
+ tsvname = 'Fitbit User Profile'
tsv(report_folder, data_headers, data_list_user, tsvname)
- tlactivity = f'Fitbit User Profile'
+ tlactivity = 'Fitbit User Profile'
timeline(report_folder, tlactivity, data_list_user, data_headers)
else:
@@ -445,17 +452,17 @@ def get_fitbit(files_found, report_folder, seeker, wrap_text):
report.write_artifact_data_table(data_headers, data_list_steps, file_found_mobile)
report.end_artifact_report()
- tsvname = f'Fitbit Steps'
+ tsvname = 'Fitbit Steps'
tsv(report_folder, data_headers, data_list_steps, tsvname)
- tlactivity = f'Fitbit Steps'
+ tlactivity = 'Fitbit Steps'
timeline(report_folder, tlactivity, data_list_steps, data_headers)
else:
logfunc('No Fitbit Steps data available')
# pylint: disable=broad-exception-caught
-def get_fitbit_wearos(files_found, report_folder, seeker, wrap_text):
+def get_fitbit_wearos(files_found, report_folder, _seeker, _wrap_text):
for file_found in files_found:
file_found = str(file_found)
@@ -727,7 +734,7 @@ def get_fitbit_wearos(files_found, report_folder, seeker, wrap_text):
# --- START: INJECT IFRAME AT BOTTOM ---
if len(points) > 0:
report.add_section_heading('Interactive Map Preview')
- report.add_map(f'')
+ report.add_map('')
# --- END: INJECT IFRAME AT BOTTOM ---
report.end_artifact_report()