scripts/artifacts/parsecdCache.py (62 changes: 42 additions & 20 deletions)
@@ -8,49 +8,71 @@
"requirements": "none",
"category": "Spotlight Searches",
"notes": "",
"paths": ('**/EngagedCompletions/Cache.db*'),
"output_types": "standard"
"paths": ("**/EngagedCompletions/Cache.db*"),
"output_types": "standard",
}
}


from scripts.ilapfuncs import open_sqlite_db_readonly, convert_ts_human_to_utc
from scripts.ilapfuncs import artifact_processor, convert_utc_human_to_timezone


@artifact_processor
def get_parseCDCache(files_found, report_folder, seeker, wrap_text, timezone_offset):

data_list = []
report_file = 'Unknown'
report_file = "Unknown"
has_score = False
for file_found in files_found:
file_found = str(file_found)
if not file_found.endswith('.db'):
continue # Skip all other files
if not file_found.endswith(".db"):
continue # Skip all other files
report_file = file_found
db = open_sqlite_db_readonly(file_found)

cursor = db.cursor()
cursor.execute('''
select
datetime(engagement_date + 978307200,'unixepoch') as engagement_date,
input,
completion,
transformed,
score
FROM completion_cache_engagement
''')

cursor.execute("pragma table_info(completion_cache_engagement)")
cols = [row[1] for row in cursor.fetchall()]
has_score = "score" in cols

select_cols = [
"datetime(engagement_date + 978307200,'unixepoch') as engagement_date",
"input",
"completion",
"transformed",
]
if has_score:
select_cols.append("score")

qry = f"""
select {", ".join(select_cols)}
from completion_cache_engagement
"""

cursor.execute(qry)
all_rows = cursor.fetchall()

for row in all_rows:
timestamp = row[0]
timestamp = convert_ts_human_to_utc(timestamp)
timestamp = convert_ts_human_to_utc(row[0])
timestamp = convert_utc_human_to_timezone(timestamp, timezone_offset)

data_list.append((timestamp, row[1], row[2], row[3], row[4]))
# Save data to temp variable to check whether has "score" column or no
temp = [timestamp, row[1], row[2], row[3]]
if has_score:
temp.append(row[4])

data_list.append(tuple(temp))

db.close()

data_headers = (('Engagement Date', 'datetime'), 'Input', 'Completion', 'Transformed', 'Score')
data_headers = [
("Engagement Date", "datetime"),
"Input",
"Completion",
"Transformed",
]
if has_score:
data_headers.append("Score")

return data_headers, data_list, report_file
return tuple(data_headers), data_list, report_file
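
For reference, the optional-column pattern this change introduces can be exercised outside iLEAPP with a minimal standalone sketch. It assumes a local copy of the EngagedCompletions Cache.db at a hypothetical path (DB_PATH), uses the standard sqlite3 module instead of open_sqlite_db_readonly, and skips the UTC/timezone conversion helpers; it illustrates the PRAGMA table_info check and dynamic SELECT, not the module's actual entry point.

import sqlite3

# Hypothetical path; in the artifact module the file is located by the "paths" glob.
DB_PATH = "Cache.db"

def fetch_engagements(db_path):
    """Return (headers, rows) from completion_cache_engagement,
    including the optional "score" column only when it exists."""
    con = sqlite3.connect(db_path)
    try:
        cur = con.cursor()

        # PRAGMA table_info yields one row per column; index 1 is the column name.
        cur.execute("PRAGMA table_info(completion_cache_engagement)")
        cols = [row[1] for row in cur.fetchall()]
        has_score = "score" in cols

        # 978307200 shifts Apple's 2001-01-01 epoch to the Unix epoch.
        select_cols = [
            "datetime(engagement_date + 978307200, 'unixepoch') AS engagement_date",
            "input",
            "completion",
            "transformed",
        ]
        if has_score:
            select_cols.append("score")

        cur.execute(f"SELECT {', '.join(select_cols)} FROM completion_cache_engagement")
        rows = cur.fetchall()

        headers = ["Engagement Date", "Input", "Completion", "Transformed"]
        if has_score:
            headers.append("Score")
        return headers, rows
    finally:
        con.close()

if __name__ == "__main__":
    headers, rows = fetch_engagements(DB_PATH)
    print(headers)
    for row in rows:
        print(row)

Running the sketch prints the header list followed by one tuple per cached engagement; databases written before the score column existed simply yield four-value rows, mirroring how the updated artifact drops the Score header in that case.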