diff --git a/.github/workflows/test_docker.yml b/.github/workflows/test_docker.yml
index bb348b4..f1afb39 100644
--- a/.github/workflows/test_docker.yml
+++ b/.github/workflows/test_docker.yml
@@ -92,5 +92,5 @@ jobs:
         UnifiedLogReader.py UnifiedLogTestData/system_logs.logarchive2 UnifiedLogTestData/system_logs.logarchive2/timesync UnifiedLogTestData/system_logs.logarchive2/small /tmp/A
         head /tmp/A/logs.txt
         md5sum /tmp/A/logs.txt
-        echo expecting md5sum bb0a47a1ffd3594f82c288283016cdb2
-        md5sum /tmp/A/logs.txt | grep bb0a47a1ffd3594f82c288283016cdb2
+        echo expecting md5sum 4f5fb13d5d7cc3e5d33912fed0d9bb7d
+        md5sum /tmp/A/logs.txt | grep 4f5fb13d5d7cc3e5d33912fed0d9bb7d
diff --git a/UnifiedLog/tracev3_file.py b/UnifiedLog/tracev3_file.py
index 83b8cfe..1e7cdea 100644
--- a/UnifiedLog/tracev3_file.py
+++ b/UnifiedLog/tracev3_file.py
@@ -999,6 +999,8 @@ def _ParseStateChunkData(self, chunk_data, catalog, proc_info, logs, log_file_po
         ttl = 0 # FIX ME before some refactoring this probably was filled
         # type 1 does not have any strings, it is blank or random bytes
+        obj_type_str_1 = ""
+        obj_type_str_2 = ""
         if data_type != 1:
             obj_type_str_1 = self._ReadCString(chunk_data[56:120])
             obj_type_str_2 = self._ReadCString(chunk_data[120:184])
@@ -1023,12 +1025,14 @@
                 logger.exception('Problem reading plist from log @ 0x{:X} ct={}'.format(log_file_pos, ct))
         elif data_type == 2:
             #custom object, not being read by log utility in many cases!
+            log_msg = f"{obj_type_str_1} {obj_type_str_2} data {binascii.hexlify(data).decode('utf8')}"
             logger.error('Did not read data of type {}, t1={}, t2={}, length=0x{:X} from log @ 0x{:X} ct={}'.format(data_type, obj_type_str_1, obj_type_str_2, data_len, log_file_pos, ct))
         elif data_type == 3: # custom [Apple]
             #TODO - read non-plist data
             if obj_type_str_1 == 'location' and obj_type_str_2 == '_CLClientManagerStateTrackerState':
                 log_msg = self._Read_CLClientManagerStateTrackerState(data)
             else:
+                log_msg = f"{obj_type_str_1} {obj_type_str_2} data {binascii.hexlify(data).decode('utf8')}"
                 logger.error('Did not read data of type {}, t1={}, t2={}, length=0x{:X} from log @ 0x{:X} ct={}'.format(data_type, obj_type_str_1, obj_type_str_2, data_len, log_file_pos, ct))
         else:
@@ -1498,6 +1502,15 @@ def _handle_mask_hash(self, hit, data_item, log_file_pos, flags_width_precision,
        '@': _handle_object,
        'P': _handle_custom_pointer,
        'p': _handle_pointer,
+       '{public}': _handle_string,
+       '{private}': _handle_string,
+       '{public, signpost:metrics}': _handle_string,
+       '{signpost.description:attribute}': _handle_string,
+       '{public,signpost.description:attribute}': _handle_string,
+       '{public,signpost.telemetry:string1,name=ProxiedBundleID}': _handle_string,
+       '{public, name=pipeline, signpost.telemetry:string1}': _handle_string,
+       '{signpost.telemetry:string1,public}': _handle_string,
+       '{signpost.telemetry:string2,public}': _handle_string,
        '{signpost.telemetry:string1}': _handle_string,
        '{private, mask.hash}': _handle_mask_hash,
        }
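
Note on the _ParseStateChunkData hunk: the two added empty-string assignments keep obj_type_str_1 and obj_type_str_2 defined for type-1 chunks, which carry no strings; without the defaults, any later reference to those names (for example in the error-logging branches) would raise UnboundLocalError for data_type == 1. A minimal sketch of the pattern, with hypothetical names rather than the module's real API:

    def describe_chunk(data_type, read_cstring):
        # Initialize up front so every later reference is safe, whatever the type.
        obj_type_str_1 = ""
        obj_type_str_2 = ""
        if data_type != 1:  # type 1 carries no strings, so nothing is read for it
            obj_type_str_1 = read_cstring()
            obj_type_str_2 = read_cstring()
        # Without the defaults above, this line would raise UnboundLocalError
        # whenever data_type == 1.
        return "t1={} t2={}".format(obj_type_str_1, obj_type_str_2)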