Overhaul of mvt-ios modules

Nex 2021-08-16 10:50:35 +02:00
parent 24d7187303
commit 96e4a9a4a4
24 changed files with 304 additions and 270 deletions

View File

@@ -115,10 +115,10 @@ class Indicators:
         # Then we just check the top level domain.
         if final_url.top_level.lower() == ioc:
             if orig_url.is_shortened and orig_url.url != final_url.url:
-                self.log.warning("Found a sub-domain matching a suspicious top level %s shortened as %s",
+                self.log.warning("Found a sub-domain matching a known suspicious top level %s shortened as %s",
                                  final_url.url, orig_url.url)
             else:
-                self.log.warning("Found a sub-domain matching a suspicious top level: %s", final_url.url)
+                self.log.warning("Found a sub-domain matching a known suspicious top level: %s", final_url.url)

             return True
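The logic in this hunk lives in Indicators.check_domain: MVT expands shortened URLs and then compares the final URL's top level (its registered domain) against domain IOCs. A rough sketch of that flow, assuming a tldextract-style parser; the URL handling in MVT itself is done by mvt.common.url.URL, and the shortener list below is a hypothetical subset:

    import logging
    import tldextract

    log = logging.getLogger(__name__)
    SHORTENER_DOMAINS = {"bit.ly", "t.co", "tinyurl.com"}  # hypothetical subset

    def matches_suspicious_top_level(orig_url, final_url, ioc_domain):
        # Compare the registered domain ("top level" in MVT terms) to the IOC.
        if tldextract.extract(final_url).registered_domain.lower() != ioc_domain:
            return False

        orig_domain = tldextract.extract(orig_url).registered_domain.lower()
        if orig_domain in SHORTENER_DOMAINS and orig_url != final_url:
            log.warning("Found a sub-domain matching a known suspicious top level %s shortened as %s",
                        final_url, orig_url)
        else:
            log.warning("Found a sub-domain matching a known suspicious top level: %s", final_url)
        return True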

View File

@@ -26,8 +26,11 @@ class IOSExtraction(MVTModule):
         self.is_fs_dump = False
         self.is_sysdiagnose = False

-    def _is_database_malformed(self, file_path):
-        # Check if the database is malformed.
+    def _recover_sqlite_db_if_needed(self, file_path):
+        """Tries to recover a malformed database by running a .clone command.
+        :param file_path: Path to the malformed database file.
+        """
+        # TODO: Find a better solution.
         conn = sqlite3.connect(file_path)
         cur = conn.cursor()
@@ -40,19 +43,11 @@ class IOSExtraction(MVTModule):
         finally:
             conn.close()

-        return recover
-
-    def _recover_database(self, file_path):
-        """Tries to recover a malformed database by running a .clone command.
-        :param file_path: Path to the malformed database file.
-        """
-        # TODO: Find a better solution.
+        if not recover:
+            return
+
         self.log.info("Database at path %s is malformed. Trying to recover...", file_path)

-        if not os.path.exists(file_path):
-            return
-
         if not shutil.which("sqlite3"):
             raise DatabaseCorruptedError("Unable to recover without sqlite3 binary. Please install sqlite3!")

         if '"' in file_path:
@@ -107,7 +102,7 @@ class IOSExtraction(MVTModule):
         return None

-    def _get_fs_files_from_pattern(self, root_paths):
+    def _get_fs_files_from_patterns(self, root_paths):
         for root_path in root_paths:
             for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
                 if not os.path.exists(found_path):
@@ -116,8 +111,11 @@ class IOSExtraction(MVTModule):
             yield found_path

     def _find_ios_database(self, backup_ids=None, root_paths=[]):
-        """Try to locate the module's database file from either an iTunes
-        backup or a full filesystem dump.
+        """Try to locate a module's database file from either an iTunes
+        backup or a full filesystem dump. This is intended only for
+        modules that expect to work with a single SQLite database.
+        If a module requires to process multiple databases or files,
+        you should use the helper functions above.
         :param backup_id: iTunes backup database file's ID (or hash).
         :param root_paths: Glob patterns for files to seek in filesystem dump.
         """
@@ -138,21 +136,14 @@ class IOSExtraction(MVTModule):
         if not file_path or not os.path.exists(file_path):
             # We reset the file_path.
             file_path = None
-            for root_path in root_paths:
-                for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
-                    # If we find a valid path, we set file_path.
-                    if os.path.exists(found_path):
-                        file_path = found_path
-                        break
-
-                    # Otherwise, we reset the file_path again.
-                    file_path = None
+            for found_path in self._get_fs_files_from_patterns(root_paths):
+                file_path = found_path
+                break

         # If we do not find any, we fail.
         if file_path:
             self.file_path = file_path
         else:
             raise DatabaseNotFoundError("Unable to find the module's database file")

-        if self._is_database_malformed(self.file_path):
-            self._recover_database(self.file_path)
+        self._recover_sqlite_db_if_needed(self.file_path)
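The two helpers collapsed here (_is_database_malformed and _recover_database) become a single _recover_sqlite_db_if_needed. A minimal self-contained sketch of the combined behavior, assuming the sqlite3 CLI is on PATH; the integrity query and the file shuffling below are assumptions for illustration, not the module's exact code:

    import os
    import shutil
    import sqlite3
    import subprocess

    def recover_sqlite_db_if_needed(file_path, log):
        """Sketch: detect a malformed SQLite file and rebuild it via .clone."""
        conn = sqlite3.connect(file_path)
        cur = conn.cursor()
        try:
            # A full integrity scan either reports rows other than "ok" or
            # raises DatabaseError outright on a corrupted image.
            cur.execute("PRAGMA integrity_check;")
            recover = cur.fetchone()[0] != "ok"
        except sqlite3.DatabaseError:
            recover = True
        finally:
            conn.close()

        if not recover:
            return

        log.info("Database at path %s is malformed. Trying to recover...", file_path)
        if not shutil.which("sqlite3"):
            raise RuntimeError("Unable to recover without sqlite3 binary. Please install sqlite3!")
        if '"' in file_path:
            raise RuntimeError("Database path contains a quote, refusing to shell out.")

        recovered_path = f"{file_path}_recovered.db"
        # ".clone" copies every readable record into a brand new database file.
        subprocess.run(["sqlite3", file_path, f'.clone "{recovered_path}"'], check=True)
        os.replace(recovered_path, file_path)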

View File

@@ -3,6 +3,8 @@
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

+import sqlite3
+
 from ..net_base import NetBase

 NETUSAGE_ROOT_PATHS = [
@@ -21,8 +23,13 @@ class Netusage(NetBase):
                          log=log, results=results)

     def run(self):
-        self._find_ios_database(root_paths=NETUSAGE_ROOT_PATHS)
-        self.log.info("Found NetUsage database at path: %s", self.file_path)
-        self._extract_net_data()
+        for netusage_path in self._get_fs_files_from_patterns(NETUSAGE_ROOT_PATHS):
+            self.file_path = netusage_path
+            self.log.info("Found NetUsage database at path: %s", self.file_path)
+            try:
+                self._extract_net_data()
+            except sqlite3.OperationalError as e:
+                self.log.info("Skipping this NetUsage database because it seems empty or malformed: %s", e)
+                continue
+
         self._find_suspicious_processes()
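The try/except added here relies on the fact that querying a table that is absent from an empty or half-written NetUsage.sqlite raises sqlite3.OperationalError. A quick self-contained illustration (ZPROCESS is the table the net_base query reads):

    import sqlite3

    # An in-memory database stands in for an empty or malformed NetUsage.sqlite.
    conn = sqlite3.connect(":memory:")
    try:
        conn.execute("SELECT ZPROCNAME FROM ZPROCESS;")
    except sqlite3.OperationalError as e:
        # Prints: no such table: ZPROCESS
        print(f"Skipping this NetUsage database because it seems empty or malformed: {e}")
    finally:
        conn.close()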

View File

@@ -39,50 +39,57 @@ class SafariFavicon(IOSExtraction):
             if self.indicators.check_domain(result["url"]) or self.indicators.check_domain(result["icon_url"]):
                 self.detected.append(result)

-    def run(self):
-        self._find_ios_database(root_paths=SAFARI_FAVICON_ROOT_PATHS)
-        self.log.info("Found Safari favicon cache database at path: %s", self.file_path)
-
-        conn = sqlite3.connect(self.file_path)
+    def _process_favicon_db(self, file_path):
+        conn = sqlite3.connect(file_path)

         # Fetch valid icon cache.
         cur = conn.cursor()
-        cur.execute("""SELECT
+        cur.execute("""
+            SELECT
                 page_url.url,
                 icon_info.url,
                 icon_info.timestamp
             FROM page_url
             JOIN icon_info ON page_url.uuid = icon_info.uuid
-            ORDER BY icon_info.timestamp;""")
+            ORDER BY icon_info.timestamp;
+        """)

-        items = []
-        for item in cur:
-            items.append({
-                "url": item[0],
-                "icon_url": item[1],
-                "timestamp": item[2],
-                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[2])),
+        for row in cur:
+            self.results.append({
+                "url": row[0],
+                "icon_url": row[1],
+                "timestamp": row[2],
+                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
                 "type": "valid",
+                "safari_favicon_db_path": file_path,
             })

         # Fetch icons from the rejected icons table.
-        cur.execute("""SELECT
+        cur.execute("""
+            SELECT
                 page_url,
                 icon_url,
                 timestamp
-            FROM rejected_resources ORDER BY timestamp;""")
+            FROM rejected_resources ORDER BY timestamp;
+        """)

-        for item in cur:
-            items.append({
-                "url": item[0],
-                "icon_url": item[1],
-                "timestamp": item[2],
-                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[2])),
+        for row in cur:
+            self.results.append({
+                "url": row[0],
+                "icon_url": row[1],
+                "timestamp": row[2],
+                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
                 "type": "rejected",
+                "safari_favicon_db_path": file_path,
             })

         cur.close()
         conn.close()

-        self.log.info("Extracted a total of %d favicon records", len(items))
-        self.results = sorted(items, key=lambda item: item["isodate"])
+    def run(self):
+        for file_path in self._get_fs_files_from_patterns(SAFARI_FAVICON_ROOT_PATHS):
+            self.log.info("Found Safari favicon cache database at path: %s", file_path)
+            self._process_favicon_db(file_path)
+
+        self.log.info("Extracted a total of %d favicon records", len(self.results))
+        self.results = sorted(self.results, key=lambda x: x["isodate"])
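The isodate values above come from convert_mactime_to_unix and convert_timestamp_to_iso in mvt.common.utils. The arithmetic behind them is simple: Safari's favicon cache, like most Apple databases, stores Core Data timestamps, i.e. seconds since 2001-01-01 00:00:00 UTC, which sits 978307200 seconds after the Unix epoch. A minimal sketch (assumed implementation, not the exact helpers):

    from datetime import datetime, timezone

    # Core Data / Mac absolute time starts at 2001-01-01 00:00:00 UTC,
    # 978307200 seconds after the Unix epoch of 1970-01-01.
    MAC_EPOCH_OFFSET = 978307200

    def convert_mactime_to_unix(mactime):
        return mactime + MAC_EPOCH_OFFSET

    def convert_timestamp_to_iso(unix_timestamp):
        return datetime.fromtimestamp(unix_timestamp, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")

    # Example: 0 in Mac time is the epoch boundary itself.
    assert convert_timestamp_to_iso(convert_mactime_to_unix(0)).startswith("2001-01-01 00:00:00")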

View File

@@ -32,7 +32,7 @@ class IOSVersionHistory(IOSExtraction):
         }

     def run(self):
-        for found_path in self._get_fs_files_from_pattern(IOS_ANALYTICS_JOURNAL_PATHS):
+        for found_path in self._get_fs_files_from_patterns(IOS_ANALYTICS_JOURNAL_PATHS):
             with open(found_path, "r") as analytics_log:
                 log_line = json.loads(analytics_log.readline().strip())

View File

@@ -23,7 +23,7 @@ class WebkitBase(IOSExtraction):
             self.detected.append(item)

     def _process_webkit_folder(self, root_paths):
-        for found_path in self._get_fs_files_from_pattern(root_paths):
+        for found_path in self._get_fs_files_from_patterns(root_paths):
             key = os.path.relpath(found_path, self.base_folder)
             for name in os.listdir(found_path):

View File

@@ -34,7 +34,8 @@ class Calls(IOSExtraction):
         }

     def run(self):
-        self._find_ios_database(backup_ids=CALLS_BACKUP_IDS, root_paths=CALLS_ROOT_PATHS)
+        self._find_ios_database(backup_ids=CALLS_BACKUP_IDS,
+                                root_paths=CALLS_ROOT_PATHS)
         self.log.info("Found Calls database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)
@@ -46,13 +47,13 @@ class Calls(IOSExtraction):
         """)

         names = [description[0] for description in cur.description]
-        for entry in cur:
+        for row in cur:
             self.results.append({
-                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(entry[0])),
-                "duration": entry[1],
-                "location": entry[2],
-                "number": entry[3].decode("utf-8") if entry[3] and entry[3] is bytes else entry[3],
-                "provider": entry[4]
+                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
+                "duration": row[1],
+                "location": row[2],
+                "number": row[3].decode("utf-8") if row[3] and row[3] is bytes else row[3],
+                "provider": row[4]
             })

         cur.close()

View File

@@ -45,14 +45,16 @@ class ChromeFavicon(IOSExtraction):
             self.detected.append(result)

     def run(self):
-        self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS, root_paths=CHROME_FAVICON_ROOT_PATHS)
+        self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS,
+                                root_paths=CHROME_FAVICON_ROOT_PATHS)
         self.log.info("Found Chrome favicon cache database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)

         # Fetch icon cache
         cur = conn.cursor()
-        cur.execute("""SELECT
+        cur.execute("""
+            SELECT
                 icon_mapping.page_url,
                 favicons.url,
                 favicon_bitmaps.last_updated,
@@ -60,14 +62,15 @@ class ChromeFavicon(IOSExtraction):
             FROM icon_mapping
             JOIN favicon_bitmaps ON icon_mapping.icon_id = favicon_bitmaps.icon_id
             JOIN favicons ON icon_mapping.icon_id = favicons.id
-            ORDER BY icon_mapping.id;""")
+            ORDER BY icon_mapping.id;
+        """)

-        items = []
-        for item in cur:
-            last_timestamp = int(item[2]) or int(item[3])
-            items.append({
-                "url": item[0],
-                "icon_url": item[1],
+        records = []
+        for row in cur:
+            last_timestamp = int(row[2]) or int(row[3])
+            records.append({
+                "url": row[0],
+                "icon_url": row[1],
                 "timestamp": last_timestamp,
                 "isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(last_timestamp)),
             })
@@ -75,5 +78,5 @@ class ChromeFavicon(IOSExtraction):
         cur.close()
         conn.close()

-        self.log.info("Extracted a total of %d favicon records", len(items))
-        self.results = sorted(items, key=lambda item: item["isodate"])
+        self.log.info("Extracted a total of %d favicon records", len(records))
+        self.results = sorted(records, key=lambda row: row["isodate"])
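convert_chrometime_to_unix handles a different epoch than the Safari modules: Chrome/WebKit timestamps count microseconds from 1601-01-01 00:00:00 UTC (the Windows FILETIME epoch), 11644473600 seconds before Unix time. A sketch of the conversion (assumed implementation, not the exact helper):

    from datetime import datetime, timezone

    # Chrome stores timestamps as microseconds since 1601-01-01 00:00:00 UTC,
    # which precedes the Unix epoch by 11644473600 seconds.
    CHROME_EPOCH_OFFSET = 11644473600

    def convert_chrometime_to_unix(chrometime):
        return chrometime / 1_000_000 - CHROME_EPOCH_OFFSET

    # Example: Chrome's value for the Unix epoch itself maps back to 1970.
    unix_ts = convert_chrometime_to_unix(11644473600 * 1_000_000)
    assert datetime.fromtimestamp(unix_ts, tz=timezone.utc).year == 1970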

View File

@@ -45,7 +45,8 @@ class ChromeHistory(IOSExtraction):
             self.detected.append(result)

     def run(self):
-        self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS, root_paths=CHROME_HISTORY_ROOT_PATHS)
+        self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS,
+                                root_paths=CHROME_HISTORY_ROOT_PATHS)
         self.log.info("Found Chrome history database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)

View File

@@ -39,9 +39,9 @@ class Contacts(IOSExtraction):
         """)

         names = [description[0] for description in cur.description]
-        for entry in cur:
+        for row in cur:
             new_contact = {}
-            for index, value in enumerate(entry):
+            for index, value in enumerate(row):
                 new_contact[names[index]] = value

             self.results.append(new_contact)
@@ -49,4 +49,5 @@ class Contacts(IOSExtraction):
         cur.close()
         conn.close()

-        self.log.info("Extracted a total of %d contacts from the address book", len(self.results))
+        self.log.info("Extracted a total of %d contacts from the address book",
+                      len(self.results))

View File

@@ -39,11 +39,13 @@ class FirefoxFavicon(IOSExtraction):
             return

         for result in self.results:
-            if self.indicators.check_domain(result["url"]) or self.indicators.check_domain(result["history_url"]):
+            if (self.indicators.check_domain(result.get("url", "")) or
+                    self.indicators.check_domain(result.get("history_url", ""))):
                 self.detected.append(result)

     def run(self):
-        self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS, root_paths=FIREFOX_HISTORY_ROOT_PATHS)
+        self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS,
+                                root_paths=FIREFOX_HISTORY_ROOT_PATHS)
         self.log.info("Found Firefox favicon database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)

View File

@@ -61,14 +61,14 @@ class FirefoxHistory(IOSExtraction):
                 WHERE visits.siteID = history.id;
         """)

-        for item in cur:
+        for row in cur:
             self.results.append({
-                "id": item[0],
-                "isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(item[1])),
-                "url": item[2],
-                "title": item[3],
-                "i1000000s_local": item[4],
-                "type": item[5]
+                "id": row[0],
+                "isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(row[1])),
+                "url": row[2],
+                "title": row[3],
+                "i1000000s_local": row[4],
+                "type": row[5]
             })

         cur.close()

View File

@@ -39,19 +39,20 @@ class IDStatusCache(IOSExtraction):
             return

         for result in self.results:
-            if result["user"].startswith("mailto:"):
+            if result.get("user", "").startswith("mailto:"):
                 email = result["user"][7:].strip("'")
                 if self.indicators.check_email(email):
                     self.detected.append(result)
                 continue

-            if "\\x00\\x00" in result["user"]:
+            if "\\x00\\x00" in result.get("user", ""):
                 self.log.warning("Found an ID Status Cache entry with suspicious patterns: %s",
-                                 result["user"])
+                                 result.get("user"))
                 self.detected.append(result)

     def run(self):
-        self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS, root_paths=IDSTATUSCACHE_ROOT_PATHS)
+        self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS,
+                                root_paths=IDSTATUSCACHE_ROOT_PATHS)
         self.log.info("Found IDStatusCache plist at path: %s", self.file_path)

         with open(self.file_path, "rb") as handle:
@@ -78,7 +79,7 @@ class IDStatusCache(IOSExtraction):
         entry_counter = collections.Counter([entry["user"] for entry in id_status_cache_entries])
         for entry in id_status_cache_entries:
-            # Add total count of occurrences to the status cache entry
+            # Add total count of occurrences to the status cache entry.
             entry["occurrences"] = entry_counter[entry["user"]]
             self.results.append(entry)

View File

@@ -123,48 +123,49 @@ class InteractionC(IOSExtraction):
             LEFT JOIN Z_2INTERACTIONRECIPIENT ON ZINTERACTIONS.Z_PK== Z_2INTERACTIONRECIPIENT.Z_3INTERACTIONRECIPIENT
             LEFT JOIN ZCONTACTS RECEIPIENTCONACT ON Z_2INTERACTIONRECIPIENT.Z_2RECIPIENTS== RECEIPIENTCONACT.Z_PK;
         """)

         names = [description[0] for description in cur.description]
-        for item in cur:
+        for row in cur:
             self.results.append({
-                "start_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[0])),
-                "end_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[1])),
-                "bundle_id": item[2],
-                "account": item[3],
-                "target_bundle_id": item[4],
-                "direction": item[5],
-                "sender_display_name": item[6],
-                "sender_identifier": item[7],
-                "sender_personid": item[8],
-                "recipient_display_name": item[9],
-                "recipient_identifier": item[10],
-                "recipient_personid": item[11],
-                "recipient_count": item[12],
-                "domain_identifier": item[13],
-                "is_response": item[14],
-                "content": item[15],
-                "uti": item[16],
-                "content_url": item[17],
-                "size": item[18],
-                "photo_local_id": item[19],
-                "attachment_id": item[20],
-                "cloud_id": item[21],
-                "incoming_recipient_count": item[22],
-                "incoming_sender_count": item[23],
-                "outgoing_recipient_count": item[24],
-                "interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[25])) if item[25] else None,
-                "contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[26])) if item[26] else None,
-                "first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[27])) if item[27] else None,
-                "first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[28])) if item[28] else None,
-                "first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[29])) if item[29] else None,
-                "last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[30])) if item[30] else None,
-                "last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[31])) if item[31] else None,
-                "last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[32])) if item[32] else None,
-                "custom_id": item[33],
-                "location_uuid": item[35],
-                "group_name": item[36],
-                "derivied_intent_id": item[37],
-                "table_id": item[38]
+                "start_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
+                "end_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[1])),
+                "bundle_id": row[2],
+                "account": row[3],
+                "target_bundle_id": row[4],
+                "direction": row[5],
+                "sender_display_name": row[6],
+                "sender_identifier": row[7],
+                "sender_personid": row[8],
+                "recipient_display_name": row[9],
+                "recipient_identifier": row[10],
+                "recipient_personid": row[11],
+                "recipient_count": row[12],
+                "domain_identifier": row[13],
+                "is_response": row[14],
+                "content": row[15],
+                "uti": row[16],
+                "content_url": row[17],
+                "size": row[18],
+                "photo_local_id": row[19],
+                "attachment_id": row[20],
+                "cloud_id": row[21],
+                "incoming_recipient_count": row[22],
+                "incoming_sender_count": row[23],
+                "outgoing_recipient_count": row[24],
+                "interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[25])) if row[25] else None,
+                "contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[26])) if row[26] else None,
+                "first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[27])) if row[27] else None,
+                "first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[28])) if row[28] else None,
+                "first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[29])) if row[29] else None,
+                "last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[30])) if row[30] else None,
+                "last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[31])) if row[31] else None,
+                "last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[32])) if row[32] else None,
+                "custom_id": row[33],
+                "location_uuid": row[35],
+                "group_name": row[36],
+                "derivied_intent_id": row[37],
+                "table_id": row[38]
             })

         cur.close()

View File

@@ -24,6 +24,7 @@ class LocationdClients(IOSExtraction):
         super().__init__(file_path=file_path, base_folder=base_folder,
                          output_folder=output_folder, fast_mode=fast_mode,
                          log=log, results=results)
+
         self.timestamps = [
             "ConsumptionPeriodBegin",
             "ReceivingLocationInformationTimeStopped",
@@ -50,7 +51,8 @@ class LocationdClients(IOSExtraction):
         return records

     def run(self):
-        self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS, root_paths=LOCATIOND_ROOT_PATHS)
+        self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS,
+                                root_paths=LOCATIOND_ROOT_PATHS)
         self.log.info("Found Locationd Clients plist at path: %s", self.file_path)

         with open(self.file_path, "rb") as handle:

View File

@@ -23,7 +23,8 @@ class Datausage(NetBase):
                          log=log, results=results)

     def run(self):
-        self._find_ios_database(backup_ids=DATAUSAGE_BACKUP_IDS, root_paths=DATAUSAGE_ROOT_PATHS)
+        self._find_ios_database(backup_ids=DATAUSAGE_BACKUP_IDS,
+                                root_paths=DATAUSAGE_ROOT_PATHS)
         self.log.info("Found DataUsage database at path: %s", self.file_path)

         self._extract_net_data()

View File

@@ -4,6 +4,7 @@
 # https://license.mvt.re/1.1/

 import io
+import os
 import plistlib
 import sqlite3
@@ -15,6 +16,7 @@ from ..base import IOSExtraction
 SAFARI_BROWSER_STATE_BACKUP_IDS = [
     "3a47b0981ed7c10f3e2800aa66bac96a3b5db28e",
 ]
+SAFARI_BROWSER_STATE_BACKUP_RELPATH = "Library/Safari/BrowserState.db"
 SAFARI_BROWSER_STATE_ROOT_PATHS = [
     "private/var/mobile/Library/Safari/BrowserState.db",
     "private/var/mobile/Containers/Data/Application/*/Library/Safari/BrowserState.db",
@@ -29,6 +31,8 @@ class SafariBrowserState(IOSExtraction):
                          output_folder=output_folder, fast_mode=fast_mode,
                          log=log, results=results)

+        self._session_history_count = 0
+
     def serialize(self, record):
         return {
             "timestamp": record["last_viewed_timestamp"],
@@ -53,16 +57,12 @@ class SafariBrowserState(IOSExtraction):
                 if "entry_url" in session_entry and self.indicators.check_domain(session_entry["entry_url"]):
                     self.detected.append(result)

-    def run(self):
-        self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
-                                root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
-        self.log.info("Found Safari browser state database at path: %s", self.file_path)
-
-        conn = sqlite3.connect(self.file_path)
-
-        # Fetch valid icon cache.
+    def _process_browser_state_db(self, db_path):
+        conn = sqlite3.connect(db_path)
         cur = conn.cursor()
-        cur.execute("""SELECT
+        cur.execute("""
+            SELECT
                 tabs.title,
                 tabs.url,
                 tabs.user_visible_url,
@@ -70,34 +70,43 @@ class SafariBrowserState(IOSExtraction):
                 tab_sessions.session_data
             FROM tabs
             JOIN tab_sessions ON tabs.uuid = tab_sessions.tab_uuid
-            ORDER BY tabs.last_viewed_time;""")
+            ORDER BY tabs.last_viewed_time;
+        """)

-        session_history_count = 0
-        for item in cur:
+        for row in cur:
             session_entries = []
-            if item[4]:
+            if row[4]:
                 # Skip a 4 byte header before the plist content.
-                session_plist = item[4][4:]
+                session_plist = row[4][4:]

                 session_data = plistlib.load(io.BytesIO(session_plist))
                 session_data = keys_bytes_to_string(session_data)

-                if "SessionHistoryEntries" in session_data["SessionHistory"]:
-                    for session_entry in session_data["SessionHistory"]["SessionHistoryEntries"]:
-                        session_history_count += 1
+                if "SessionHistoryEntries" in session_data.get("SessionHistory", {}):
+                    for session_entry in session_data["SessionHistory"].get("SessionHistoryEntries"):
+                        self._session_history_count += 1
                         session_entries.append({
-                            "entry_title": session_entry["SessionHistoryEntryOriginalURL"],
-                            "entry_url": session_entry["SessionHistoryEntryURL"],
-                            "data_length": len(session_entry["SessionHistoryEntryData"]) if "SessionHistoryEntryData" in session_entry else 0,
+                            "entry_title": session_entry.get("SessionHistoryEntryOriginalURL"),
+                            "entry_url": session_entry.get("SessionHistoryEntryURL"),
+                            "data_length": len(session_entry.get("SessionHistoryEntryData")) if "SessionHistoryEntryData" in session_entry else 0,
                         })

             self.results.append({
-                "tab_title": item[0],
-                "tab_url": item[1],
-                "tab_visible_url": item[2],
-                "last_viewed_timestamp": convert_timestamp_to_iso(convert_mactime_to_unix(item[3])),
+                "tab_title": row[0],
+                "tab_url": row[1],
+                "tab_visible_url": row[2],
+                "last_viewed_timestamp": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
                 "session_data": session_entries,
+                "safari_browser_state_db": os.path.relpath(db_path, self.base_folder),
             })

+    def run(self):
+        # TODO: Is there really only one BrowserState.db in a device?
+        self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
+                                root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
+        self.log.info("Found Safari browser state database at path: %s", self.file_path)
+
+        self._process_browser_state_db(self.file_path)
+
         self.log.info("Extracted a total of %d tab records and %d session history entries",
-                      len(self.results), session_history_count)
+                      len(self.results), self._session_history_count)
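keys_bytes_to_string (imported from mvt.common.utils) matters here because binary plists deserialized from the session_data BLOB can carry bytes keys that break normal dict access. A plausible sketch of such a normalizer; the real helper may differ:

    def keys_bytes_to_string(obj):
        """Sketch: recursively decode bytes dict keys to str so the session
        plist can be traversed like a normal JSON-ish structure."""
        if isinstance(obj, dict):
            return {
                (key.decode("utf-8", errors="replace") if isinstance(key, bytes) else key):
                    keys_bytes_to_string(value)
                for key, value in obj.items()
            }
        if isinstance(obj, list):
            return [keys_bytes_to_string(item) for item in obj]
        return obj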

View File

@@ -3,6 +3,7 @@
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

+import os
 import sqlite3

 from mvt.common.url import URL
@@ -10,10 +11,7 @@ from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
 from ..base import IOSExtraction

-SAFARI_HISTORY_BACKUP_IDS = [
-    "e74113c185fd8297e140cfcf9c99436c5cc06b57",
-    "1a0e7afc19d307da602ccdcece51af33afe92c53",
-]
+SAFARI_HISTORY_BACKUP_RELPATH = "Library/Safari/History.db"
 SAFARI_HISTORY_ROOT_PATHS = [
     "private/var/mobile/Library/Safari/History.db",
     "private/var/mobile/Containers/Data/Application/*/Library/Safari/History.db",
@@ -81,11 +79,8 @@ class SafariHistory(IOSExtraction):
             if self.indicators.check_domain(result["url"]):
                 self.detected.append(result)

-    def run(self):
-        self._find_ios_database(backup_ids=SAFARI_HISTORY_BACKUP_IDS, root_paths=SAFARI_HISTORY_ROOT_PATHS)
-        self.log.info("Found Safari history database at path: %s", self.file_path)
-
-        conn = sqlite3.connect(self.file_path)
+    def _process_history_db(self, history_path):
+        conn = sqlite3.connect(history_path)
         cur = conn.cursor()
         cur.execute("""
             SELECT
@@ -100,20 +95,33 @@ class SafariHistory(IOSExtraction):
                 ORDER BY history_visits.visit_time;
         """)

-        items = []
-        for item in cur:
-            items.append({
-                "id": item[0],
-                "url": item[1],
-                "visit_id": item[2],
-                "timestamp": item[3],
-                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[3])),
-                "redirect_source": item[4],
-                "redirect_destination": item[5]
+        for row in cur:
+            self.results.append({
+                "id": row[0],
+                "url": row[1],
+                "visit_id": row[2],
+                "timestamp": row[3],
+                "isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
+                "redirect_source": row[4],
+                "redirect_destination": row[5],
+                "safari_history_db": os.path.relpath(history_path, self.base_folder),
             })

         cur.close()
         conn.close()

-        self.log.info("Extracted a total of %d history items", len(items))
-        self.results = items
+    def run(self):
+        if self.is_backup:
+            for history_file in self._get_backup_files_from_manifest(relative_path=SAFARI_HISTORY_BACKUP_RELPATH):
+                history_path = self._get_backup_file_from_id(history_file["file_id"])
+                if not history_path:
+                    continue
+
+                self.log.info("Found Safari history database at path: %s", history_path)
+                self._process_history_db(history_path)
+        elif self.is_fs_dump:
+            for history_path in self._get_fs_files_from_patterns(SAFARI_HISTORY_ROOT_PATHS):
+                self.log.info("Found Safari history database at path: %s", history_path)
+                self._process_history_db(history_path)
+
+        self.log.info("Extracted a total of %d history records", len(self.results))

View File

@@ -41,15 +41,13 @@ class SMS(IOSExtraction):
             return

         for message in self.results:
-            if not "text" in message:
-                continue
-
-            message_links = check_for_links(message["text"])
+            message_links = check_for_links(message.get("text", ""))
             if self.indicators.check_domains(message_links):
                 self.detected.append(message)

     def run(self):
-        self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS)
+        self._find_ios_database(backup_ids=SMS_BACKUP_IDS,
+                                root_paths=SMS_ROOT_PATHS)
         self.log.info("Found SMS database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)
@@ -78,17 +76,17 @@ class SMS(IOSExtraction):
             # We convert Mac's ridiculous timestamp format.
             message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(message["date"]))
-            message["direction"] = ("sent" if message["is_from_me"] == 1 else "received")
+            message["direction"] = ("sent" if message.get("is_from_me", 0) == 1 else "received")

             # Sometimes "text" is None instead of empty string.
-            if message["text"] is None:
+            if not message.get("text", None):
                 message["text"] = ""

             # Extract links from the SMS message.
-            message_links = check_for_links(message["text"])
+            message_links = check_for_links(message.get("text", ""))

             # If we find links in the messages or if they are empty we add them to the list.
-            if message_links or message["text"].strip() == "":
+            if message_links or message.get("text", "").strip() == "":
                 self.results.append(message)

         cur.close()
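check_for_links comes from mvt.common.url; functionally it just pulls URLs out of the message text so they can be matched against domain indicators. A rough stand-in (the regex is an assumption, not MVT's exact pattern):

    import re

    # Assumed approximation of mvt.common.url.check_for_links: return all
    # http(s) URLs found in a block of text.
    LINK_REGEX = re.compile(r"""https?://[^\s"'<>]+""", re.IGNORECASE)

    def check_for_links(text):
        return LINK_REGEX.findall(text or "")

    # SMS bodies with no links produce an empty list, which is why the module
    # also keeps messages whose text is empty after stripping.
    assert check_for_links("check this out http://example.com/page") == ["http://example.com/page"]
    assert check_for_links("") == []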

View File

@@ -36,7 +36,8 @@ class SMSAttachments(IOSExtraction):
         }

     def run(self):
-        self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS)
+        self._find_ios_database(backup_ids=SMS_BACKUP_IDS,
+                                root_paths=SMS_ROOT_PATHS)
         self.log.info("Found SMS database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)
@@ -50,19 +51,20 @@ class SMSAttachments(IOSExtraction):
             FROM attachment
             LEFT JOIN message_attachment_join ON message_attachment_join.attachment_id = attachment.ROWID
             LEFT JOIN message ON message.ROWID = message_attachment_join.message_id
-            LEFT JOIN handle ON handle.ROWID = message.handle_id
+            LEFT JOIN handle ON handle.ROWID = message.handle_id;
         """)

         names = [description[0] for description in cur.description]
         for item in cur:
             attachment = {}
             for index, value in enumerate(item):
-                if (names[index] in ["user_info", "sticker_user_info", "attribution_info",
-                        "ck_server_change_token_blob", "sr_ck_server_change_token_blob"]) and value:
+                if (names[index] in ["user_info", "sticker_user_info",
+                                     "attribution_info",
+                                     "ck_server_change_token_blob",
+                                     "sr_ck_server_change_token_blob"]) and value:
                     value = b64encode(value).decode()

                 attachment[names[index]] = value

-            # We convert Mac's ridiculous timestamp format.
             attachment["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["created_date"]))
             attachment["start_date"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["start_date"]))
             attachment["direction"] = ("sent" if attachment["is_outgoing"] == 1 else "received")

View File

@@ -46,8 +46,7 @@ class WebkitResourceLoadStatistics(IOSExtraction):
     def _process_observations_db(self, db_path, key):
         self.log.info("Found WebKit ResourceLoadStatistics observations.db file at path %s", db_path)

-        if self._is_database_malformed(db_path):
-            self._recover_database(db_path)
+        self._recover_sqlite_db_if_needed(db_path)

         conn = sqlite3.connect(db_path)
         cur = conn.cursor()
@@ -66,7 +65,6 @@ class WebkitResourceLoadStatistics(IOSExtraction):
                 "registrable_domain": row[1],
                 "last_seen": row[2],
                 "had_user_interaction": bool(row[3]),
-                # TODO: Fix isodate.
                 "last_seen_isodate": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(row[2]))),
             })
@@ -83,5 +81,5 @@ class WebkitResourceLoadStatistics(IOSExtraction):
             except Exception as e:
                 self.log.info("Unable to search for WebKit observations.db: %s", e)
         elif self.is_fs_dump:
-            for db_path in self._get_fs_files_from_pattern(WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS):
+            for db_path in self._get_fs_files_from_patterns(WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS):
                 self._process_observations_db(db_path=db_path, key=os.path.relpath(db_path, self.base_folder))
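The registrable_domain, last_seen and had_user_interaction fields above mirror WebKit's per-domain tracking store. A sketch of the query shape this implies; the ObservedDomains table name and columns are taken from WebKit's ResourceLoadStatistics store and are an assumption here, not confirmed by this diff:

    import datetime
    import sqlite3

    conn = sqlite3.connect("observations.db")
    cur = conn.cursor()
    # Each row describes one domain Safari/WebKit has observed loading resources.
    cur.execute("SELECT domainID, registrableDomain, lastSeen, hadUserInteraction FROM ObservedDomains;")
    for row in cur:
        print(row[1], bool(row[3]),
              datetime.datetime.utcfromtimestamp(int(row[2])).isoformat())
    conn.close()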

View File

@@ -14,6 +14,7 @@ from ..base import IOSExtraction
 WEBKIT_SESSION_RESOURCE_LOG_BACKUP_IDS = [
     "a500ee38053454a02e990957be8a251935e28d3f",
 ]
+WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH = "Library/WebKit/WebsiteData/ResourceLoadStatistics/full_browsing_session_resourceLog.plist"
 WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS = [
     "private/var/mobile/Containers/Data/Application/*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/full_browsing_session_resourceLog.plist",
     "private/var/mobile/Containers/Data/Application/*/Library/WebKit/WebsiteData/ResourceLoadStatistics/full_browsing_session_resourceLog.plist",
@@ -33,31 +34,6 @@ class WebkitSessionResourceLog(IOSExtraction):
         self.results = {}

-    def _extract_browsing_stats(self, file_path):
-        items = []
-
-        with open(file_path, "rb") as handle:
-            file_plist = plistlib.read(handle)
-
-        if "browsingStatistics" not in file_plist:
-            return items
-
-        browsing_stats = file_plist["browsingStatistics"]
-
-        for item in browsing_stats:
-            items.append({
-                "origin": item.get("PrevalentResourceOrigin", ""),
-                "redirect_source": item.get("topFrameUniqueRedirectsFrom", ""),
-                "redirect_destination": item.get("topFrameUniqueRedirectsTo", ""),
-                "subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""),
-                "subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""),
-                "user_interaction": item.get("hadUserInteraction"),
-                "most_recent_interaction": convert_timestamp_to_iso(item["mostRecentUserInteraction"]),
-                "last_seen": convert_timestamp_to_iso(item["lastSeen"]),
-            })
-
-        return items
-
     @staticmethod
     def _extract_domains(entries):
         if not entries:
@@ -111,13 +87,41 @@ class WebkitSessionResourceLog(IOSExtraction):
                     self.log.warning("Found HTTP redirect between suspicious domains: %s", redirect_path)

+    def _extract_browsing_stats(self, log_path):
+        items = []
+
+        with open(log_path, "rb") as handle:
+            file_plist = plistlib.load(handle)
+
+        if "browsingStatistics" not in file_plist:
+            return items
+
+        browsing_stats = file_plist["browsingStatistics"]
+
+        for item in browsing_stats:
+            items.append({
+                "origin": item.get("PrevalentResourceOrigin", ""),
+                "redirect_source": item.get("topFrameUniqueRedirectsFrom", ""),
+                "redirect_destination": item.get("topFrameUniqueRedirectsTo", ""),
+                "subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""),
+                "subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""),
+                "user_interaction": item.get("hadUserInteraction"),
+                "most_recent_interaction": convert_timestamp_to_iso(item["mostRecentUserInteraction"]),
+                "last_seen": convert_timestamp_to_iso(item["lastSeen"]),
+            })
+
+        return items
+
     def run(self):
         if self.is_backup:
-            self._find_ios_database(backup_ids=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_IDS)
-            self.results[self.file_path] = self._extract_browsing_stats(self.file_path)
-            return
-
-        for log_file in self._get_fs_files_from_pattern(WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS):
-            self.log.info("Found Safari browsing session resource log at path: %s", log_file)
-            key = os.path.relpath(log_file, self.base_folder)
-            self.results[key] = self._extract_browsing_stats(log_file)
+            for log_path in self._get_backup_files_from_manifest(relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH):
+                self.log.info("Found Safari browsing session resource log at path: %s", log_path)
+                self.results[log_path] = self._extract_browsing_stats(log_path)
+        elif self.is_fs_dump:
+            for log_path in self._get_fs_files_from_patterns(WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS):
+                self.log.info("Found Safari browsing session resource log at path: %s", log_path)
+                key = os.path.relpath(log_path, self.base_folder)
+                self.results[key] = self._extract_browsing_stats(log_path)
+
+        self.log.info("Extracted records from %d Safari browsing session resource logs",
+                      len(self.results))
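The "Found HTTP redirect between suspicious domains" warning earlier in this module comes from chaining the redirect_source and redirect_destination fields collected per origin. A hypothetical sketch of how such chains can be composed from the extracted entries; the module's actual chain-walking lives in check_indicators and _extract_domains:

    def build_redirect_paths(entries):
        paths = []
        for entry in entries:
            sources = entry.get("redirect_source") or []
            destinations = entry.get("redirect_destination") or []
            # The plist fields may hold a single domain or a list of them.
            if isinstance(sources, str):
                sources = [sources]
            if isinstance(destinations, str):
                destinations = [destinations]
            for src in sources:
                for dst in destinations:
                    paths.append(f"{src} --> {entry['origin']} --> {dst}")
        return paths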

View File

@@ -30,12 +30,12 @@ class Whatsapp(IOSExtraction):
                          log=log, results=results)

     def serialize(self, record):
-        text = record["ZTEXT"].replace("\n", "\\n")
+        text = record.get("ZTEXT", "").replace("\n", "\\n")
         return {
-            "timestamp": record["isodate"],
+            "timestamp": record.get("isodate"),
             "module": self.__class__.__name__,
             "event": "message",
-            "data": f"{text} from {record['ZFROMJID']}"
+            "data": f"{text} from {record.get('ZFROMJID', 'Unknown')}",
         }

     def check_indicators(self):
@@ -43,16 +43,13 @@ class Whatsapp(IOSExtraction):
             return

         for message in self.results:
-            if not "ZTEXT" in message:
-                continue
-
-            message_links = check_for_links(message["ZTEXT"])
+            message_links = check_for_links(message.get("ZTEXT", ""))
             if self.indicators.check_domains(message_links):
                 self.detected.append(message)

     def run(self):
-        self._find_ios_database(backup_ids=WHATSAPP_BACKUP_IDS, root_paths=WHATSAPP_ROOT_PATHS)
+        self._find_ios_database(backup_ids=WHATSAPP_BACKUP_IDS,
+                                root_paths=WHATSAPP_ROOT_PATHS)
         log.info("Found WhatsApp database at path: %s", self.file_path)

         conn = sqlite3.connect(self.file_path)
@@ -65,11 +62,11 @@ class Whatsapp(IOSExtraction):
             for index, value in enumerate(message):
                 new_message[names[index]] = value

-            if not new_message["ZTEXT"]:
+            if not new_message.get("ZTEXT", None):
                 continue

             # We convert Mac's silly timestamp again.
-            new_message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(new_message["ZMESSAGEDATE"]))
+            new_message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(new_message.get("ZMESSAGEDATE")))

             # Extract links from the WhatsApp message.
             message_links = check_for_links(new_message["ZTEXT"])

View File

@@ -24,7 +24,8 @@ class NetBase(IOSExtraction):
     def _extract_net_data(self):
         conn = sqlite3.connect(self.file_path)
         cur = conn.cursor()
-        cur.execute("""SELECT
+        cur.execute("""
+            SELECT
                 ZPROCESS.ZFIRSTTIMESTAMP,
                 ZPROCESS.ZTIMESTAMP,
                 ZPROCESS.ZPROCNAME,
@@ -38,43 +39,42 @@ class NetBase(IOSExtraction):
                 ZLIVEUSAGE.ZHASPROCESS,
                 ZLIVEUSAGE.ZTIMESTAMP
             FROM ZLIVEUSAGE
-            LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK;""")
+            LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK;
+        """)

-        items = []
-        for item in cur:
+        for row in cur:
             # ZPROCESS records can be missing after the JOIN. Handle NULL timestamps.
-            if item[0] and item[1]:
-                first_isodate = convert_timestamp_to_iso(convert_mactime_to_unix(item[0]))
-                isodate = convert_timestamp_to_iso(convert_mactime_to_unix(item[1]))
+            if row[0] and row[1]:
+                first_isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0]))
+                isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[1]))
             else:
-                first_isodate = item[0]
-                isodate = item[1]
+                first_isodate = row[0]
+                isodate = row[1]

-            if item[11]:
-                live_timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(item[11]))
+            if row[11]:
+                live_timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(row[11]))
             else:
                 live_timestamp = ""

-            items.append({
+            self.results.append({
                 "first_isodate": first_isodate,
                 "isodate": isodate,
-                "proc_name": item[2],
-                "bundle_id": item[3],
-                "proc_id": item[4],
-                "wifi_in": item[5],
-                "wifi_out": item[6],
-                "wwan_in": item[7],
-                "wwan_out": item[8],
-                "live_id": item[9],
-                "live_proc_id": item[10],
+                "proc_name": row[2],
+                "bundle_id": row[3],
+                "proc_id": row[4],
+                "wifi_in": row[5],
+                "wifi_out": row[6],
+                "wwan_in": row[7],
+                "wwan_out": row[8],
+                "live_id": row[9],
+                "live_proc_id": row[10],
                 "live_isodate": live_timestamp,
             })

         cur.close()
         conn.close()

-        self.log.info("Extracted information on %d processes", len(items))
-        self.results = items
+        self.log.info("Extracted information on %d processes", len(self.results))

     def serialize(self, record):
         record_data = f"{record['proc_name']} (Bundle ID: {record['bundle_id']}, ID: {record['proc_id']})"