Overhaul of mvt-ios modules

Nex 2021-08-16 10:50:35 +02:00
parent 24d7187303
commit 96e4a9a4a4
24 changed files with 304 additions and 270 deletions

View File

@ -115,10 +115,10 @@ class Indicators:
# Then we just check the top level domain.
if final_url.top_level.lower() == ioc:
if orig_url.is_shortened and orig_url.url != final_url.url:
self.log.warning("Found a sub-domain matching a suspicious top level %s shortened as %s",
self.log.warning("Found a sub-domain matching a known suspicious top level %s shortened as %s",
final_url.url, orig_url.url)
else:
self.log.warning("Found a sub-domain matching a suspicious top level: %s", final_url.url)
self.log.warning("Found a sub-domain matching a known suspicious top level: %s", final_url.url)
return True

View File

@ -26,8 +26,11 @@ class IOSExtraction(MVTModule):
self.is_fs_dump = False
self.is_sysdiagnose = False
def _is_database_malformed(self, file_path):
# Check if the database is malformed.
def _recover_sqlite_db_if_needed(self, file_path):
"""Tries to recover a malformed database by running a .clone command.
:param file_path: Path to the malformed database file.
"""
# TODO: Find a better solution.
conn = sqlite3.connect(file_path)
cur = conn.cursor()
@ -40,19 +43,11 @@ class IOSExtraction(MVTModule):
finally:
conn.close()
return recover
def _recover_database(self, file_path):
"""Tries to recover a malformed database by running a .clone command.
:param file_path: Path to the malformed database file.
"""
# TODO: Find a better solution.
if not recover:
return
self.log.info("Database at path %s is malformed. Trying to recover...", file_path)
if not os.path.exists(file_path):
return
if not shutil.which("sqlite3"):
raise DatabaseCorruptedError("Unable to recover without sqlite3 binary. Please install sqlite3!")
if '"' in file_path:
@ -107,7 +102,7 @@ class IOSExtraction(MVTModule):
return None
def _get_fs_files_from_pattern(self, root_paths):
def _get_fs_files_from_patterns(self, root_paths):
for root_path in root_paths:
for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
if not os.path.exists(found_path):
@ -116,8 +111,11 @@ class IOSExtraction(MVTModule):
yield found_path
def _find_ios_database(self, backup_ids=None, root_paths=[]):
"""Try to locate the module's database file from either an iTunes
backup or a full filesystem dump.
"""Try to locate a module's database file from either an iTunes
backup or a full filesystem dump. This is intended only for
modules that expect to work with a single SQLite database.
If a module needs to process multiple databases or files,
it should use the helper functions above instead.
:param backup_ids: IDs (hashes) of the database file in an iTunes backup.
:param root_paths: Glob patterns for files to seek in filesystem dump.
"""
@ -138,15 +136,9 @@ class IOSExtraction(MVTModule):
if not file_path or not os.path.exists(file_path):
# We reset the file_path.
file_path = None
for root_path in root_paths:
for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
# If we find a valid path, we set file_path.
if os.path.exists(found_path):
file_path = found_path
break
# Otherwise, we reset the file_path again.
file_path = None
for found_path in self._get_fs_files_from_patterns(root_paths):
file_path = found_path
break
# If we do not find any, we fail.
if file_path:
@ -154,5 +146,4 @@ class IOSExtraction(MVTModule):
else:
raise DatabaseNotFoundError("Unable to find the module's database file")
if self._is_database_malformed(self.file_path):
self._recover_database(self.file_path)
self._recover_sqlite_db_if_needed(self.file_path)
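
For reference, a minimal sketch of what the renamed _recover_sqlite_db_if_needed does, reconstructed from the hunks above; the exact integrity check and the temporary clone name are assumptions:

    import os
    import shutil
    import sqlite3
    import subprocess

    def recover_sqlite_db_if_needed(file_path, log):
        """Sketch: detect a malformed SQLite database and try to recover
        it by cloning it with the sqlite3 command-line tool."""
        recover = False
        conn = sqlite3.connect(file_path)
        try:
            # Any query against a corrupted file raises DatabaseError.
            conn.execute("SELECT name FROM sqlite_master;")
        except sqlite3.DatabaseError:
            recover = True
        finally:
            conn.close()
        if not recover:
            return
        log.info("Database at path %s is malformed. Trying to recover...", file_path)
        if not shutil.which("sqlite3"):
            raise RuntimeError("Unable to recover without sqlite3 binary. Please install sqlite3!")
        clone_path = file_path + "_recovered"  # assumed temporary name
        # .clone copies every readable record into a fresh database file.
        subprocess.run(["sqlite3", file_path, f".clone {clone_path}"], check=True)
        os.replace(clone_path, file_path)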

View File

@ -3,6 +3,8 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import sqlite3
from ..net_base import NetBase
NETUSAGE_ROOT_PATHS = [
@ -21,8 +23,13 @@ class Netusage(NetBase):
log=log, results=results)
def run(self):
self._find_ios_database(root_paths=NETUSAGE_ROOT_PATHS)
self.log.info("Found NetUsage database at path: %s", self.file_path)
for netusage_path in self._get_fs_files_from_patterns(NETUSAGE_ROOT_PATHS):
self.file_path = netusage_path
self.log.info("Found NetUsage database at path: %s", self.file_path)
try:
self._extract_net_data()
except sqlite3.OperationalError as e:
self.log.info("Skipping this NetUsage database because it seems empty or malformed: %s", e)
continue
self._extract_net_data()
self._find_suspicious_processes()
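
Netusage now iterates over every matching database instead of stopping at the first, skipping any that cannot be read. A sketch of the shared generator this relies on, reconstructed from the base-module hunk above:

    import glob
    import os

    def get_fs_files_from_patterns(base_folder, root_paths):
        # Yield every existing file matching any of the glob patterns,
        # so callers can process all candidate databases in a dump.
        for root_path in root_paths:
            for found_path in glob.glob(os.path.join(base_folder, root_path)):
                if os.path.exists(found_path):
                    yield found_path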

View File

@ -39,50 +39,57 @@ class SafariFavicon(IOSExtraction):
if self.indicators.check_domain(result["url"]) or self.indicators.check_domain(result["icon_url"]):
self.detected.append(result)
def run(self):
self._find_ios_database(root_paths=SAFARI_FAVICON_ROOT_PATHS)
self.log.info("Found Safari favicon cache database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
def _process_favicon_db(self, file_path):
conn = sqlite3.connect(file_path)
# Fetch valid icon cache.
cur = conn.cursor()
cur.execute("""SELECT
cur.execute("""
SELECT
page_url.url,
icon_info.url,
icon_info.timestamp
FROM page_url
JOIN icon_info ON page_url.uuid = icon_info.uuid
ORDER BY icon_info.timestamp;""")
ORDER BY icon_info.timestamp;
""")
items = []
for item in cur:
items.append({
"url": item[0],
"icon_url": item[1],
"timestamp": item[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[2])),
for row in cur:
self.results.append({
"url": row[0],
"icon_url": row[1],
"timestamp": row[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
"type": "valid",
"safari_favicon_db_path": file_path,
})
# Fetch icons from the rejected icons table.
cur.execute("""SELECT
cur.execute("""
SELECT
page_url,
icon_url,
timestamp
FROM rejected_resources ORDER BY timestamp;""")
FROM rejected_resources ORDER BY timestamp;
""")
for item in cur:
items.append({
"url": item[0],
"icon_url": item[1],
"timestamp": item[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[2])),
for row in cur:
self.results.append({
"url": row[0],
"icon_url": row[1],
"timestamp": row[2],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[2])),
"type": "rejected",
"safari_favicon_db_path": file_path,
})
cur.close()
conn.close()
self.log.info("Extracted a total of %d favicon records", len(items))
self.results = sorted(items, key=lambda item: item["isodate"])
def run(self):
for file_path in self._get_fs_files_from_patterns(SAFARI_FAVICON_ROOT_PATHS):
self.log.info("Found Safari favicon cache database at path: %s", file_path)
self._process_favicon_db(file_path)
self.log.info("Extracted a total of %d favicon records", len(self.results))
self.results = sorted(self.results, key=lambda x: x["isodate"])
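
The timestamp helpers used throughout these modules convert Apple's Mac absolute time (seconds since 2001-01-01 00:00:00 UTC) into a datetime and then into an ISO-style string. A sketch consistent with how they are called here; the exact implementations live in mvt.common.utils:

    from datetime import datetime

    def convert_mactime_to_unix(timestamp):
        # Shift by the 978307200 seconds between the Unix (1970)
        # and Mac (2001) epochs, then build a UTC datetime.
        return datetime.utcfromtimestamp(timestamp + 978307200)

    def convert_timestamp_to_iso(timestamp):
        return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")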

View File

@ -32,7 +32,7 @@ class IOSVersionHistory(IOSExtraction):
}
def run(self):
for found_path in self._get_fs_files_from_pattern(IOS_ANALYTICS_JOURNAL_PATHS):
for found_path in self._get_fs_files_from_patterns(IOS_ANALYTICS_JOURNAL_PATHS):
with open(found_path, "r") as analytics_log:
log_line = json.loads(analytics_log.readline().strip())

View File

@ -23,7 +23,7 @@ class WebkitBase(IOSExtraction):
self.detected.append(item)
def _process_webkit_folder(self, root_paths):
for found_path in self._get_fs_files_from_pattern(root_paths):
for found_path in self._get_fs_files_from_patterns(root_paths):
key = os.path.relpath(found_path, self.base_folder)
for name in os.listdir(found_path):

View File

@ -34,7 +34,8 @@ class Calls(IOSExtraction):
}
def run(self):
self._find_ios_database(backup_ids=CALLS_BACKUP_IDS, root_paths=CALLS_ROOT_PATHS)
self._find_ios_database(backup_ids=CALLS_BACKUP_IDS,
root_paths=CALLS_ROOT_PATHS)
self.log.info("Found Calls database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
@ -42,17 +43,17 @@ class Calls(IOSExtraction):
cur.execute("""
SELECT
ZDATE, ZDURATION, ZLOCATION, ZADDRESS, ZSERVICE_PROVIDER
FROM ZCALLRECORD;
FROM ZCALLRECORD;
""")
names = [description[0] for description in cur.description]
for entry in cur:
for row in cur:
self.results.append({
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(entry[0])),
"duration": entry[1],
"location": entry[2],
"number": entry[3].decode("utf-8") if entry[3] and entry[3] is bytes else entry[3],
"provider": entry[4]
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
"duration": row[1],
"location": row[2],
"number": row[3].decode("utf-8") if row[3] and row[3] is bytes else row[3],
"provider": row[4]
})
cur.close()
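
Aside on the guard in this hunk: the old entry[3] is bytes compares the value against the bytes type object itself and is always False, so BLOB-stored numbers were never decoded; isinstance performs the intended check. A minimal illustration:

    value = b"+1234567890"  # ZADDRESS can come back as a BLOB
    if isinstance(value, bytes):  # "value is bytes" would always be False
        value = value.decode("utf-8")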

View File

@ -45,14 +45,16 @@ class ChromeFavicon(IOSExtraction):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS, root_paths=CHROME_FAVICON_ROOT_PATHS)
self._find_ios_database(backup_ids=CHROME_FAVICON_BACKUP_IDS,
root_paths=CHROME_FAVICON_ROOT_PATHS)
self.log.info("Found Chrome favicon cache database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
# Fetch icon cache
cur = conn.cursor()
cur.execute("""SELECT
cur.execute("""
SELECT
icon_mapping.page_url,
favicons.url,
favicon_bitmaps.last_updated,
@ -60,14 +62,15 @@ class ChromeFavicon(IOSExtraction):
FROM icon_mapping
JOIN favicon_bitmaps ON icon_mapping.icon_id = favicon_bitmaps.icon_id
JOIN favicons ON icon_mapping.icon_id = favicons.id
ORDER BY icon_mapping.id;""")
ORDER BY icon_mapping.id;
""")
items = []
for item in cur:
last_timestamp = int(item[2]) or int(item[3])
items.append({
"url": item[0],
"icon_url": item[1],
records = []
for row in cur:
last_timestamp = int(row[2]) or int(row[3])
records.append({
"url": row[0],
"icon_url": row[1],
"timestamp": last_timestamp,
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(last_timestamp)),
})
@ -75,5 +78,5 @@ class ChromeFavicon(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d favicon records", len(items))
self.results = sorted(items, key=lambda item: item["isodate"])
self.log.info("Extracted a total of %d favicon records", len(records))
self.results = sorted(records, key=lambda row: row["isodate"])
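
Chrome stores timestamps as microseconds since 1601-01-01 UTC (the Windows FILETIME epoch), so it needs its own conversion helper. A sketch of what convert_chrometime_to_unix is assumed to do, shaped to feed convert_timestamp_to_iso above:

    from datetime import datetime, timedelta

    def convert_chrometime_to_unix(timestamp):
        # Chrome/WebKit time counts microseconds since 1601-01-01 UTC.
        return datetime(1601, 1, 1) + timedelta(microseconds=timestamp)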

View File

@ -45,7 +45,8 @@ class ChromeHistory(IOSExtraction):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS, root_paths=CHROME_HISTORY_ROOT_PATHS)
self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS,
root_paths=CHROME_HISTORY_ROOT_PATHS)
self.log.info("Found Chrome history database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)

View File

@ -39,9 +39,9 @@ class Contacts(IOSExtraction):
""")
names = [description[0] for description in cur.description]
for entry in cur:
for row in cur:
new_contact = {}
for index, value in enumerate(entry):
for index, value in enumerate(row):
new_contact[names[index]] = value
self.results.append(new_contact)
@ -49,4 +49,5 @@ class Contacts(IOSExtraction):
cur.close()
conn.close()
self.log.info("Extracted a total of %d contacts from the address book", len(self.results))
self.log.info("Extracted a total of %d contacts from the address book",
len(self.results))
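
Contacts maps positional row values back to column names through cur.description. For comparison only (not what the module uses), sqlite3's built-in Row factory achieves the same mapping; table and column names below are illustrative:

    import sqlite3

    conn = sqlite3.connect("AddressBook.sqlitedb")  # illustrative path
    conn.row_factory = sqlite3.Row  # rows behave like mappings keyed by column name
    cur = conn.cursor()
    cur.execute("SELECT First, Last FROM ABPerson;")  # assumed schema
    contacts = [dict(row) for row in cur]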

View File

@ -39,11 +39,13 @@ class FirefoxFavicon(IOSExtraction):
return
for result in self.results:
if self.indicators.check_domain(result["url"]) or self.indicators.check_domain(result["history_url"]):
if (self.indicators.check_domain(result.get("url", "")) or
self.indicators.check_domain(result.get("history_url", ""))):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS, root_paths=FIREFOX_HISTORY_ROOT_PATHS)
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS,
root_paths=FIREFOX_HISTORY_ROOT_PATHS)
self.log.info("Found Firefox favicon database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)

View File

@ -61,14 +61,14 @@ class FirefoxHistory(IOSExtraction):
WHERE visits.siteID = history.id;
""")
for item in cur:
for row in cur:
self.results.append({
"id": item[0],
"isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(item[1])),
"url": item[2],
"title": item[3],
"i1000000s_local": item[4],
"type": item[5]
"id": row[0],
"isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(row[1])),
"url": row[2],
"title": row[3],
"i1000000s_local": row[4],
"type": row[5]
})
cur.close()
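
Firefox's visits.date column holds microseconds since the Unix epoch, hence the division by 1,000,000 before utcfromtimestamp(). For example:

    from datetime import datetime

    visit_date = 1629100000000000  # microseconds, illustrative value
    print(datetime.utcfromtimestamp(visit_date / 1000000))  # 2021-08-16 07:46:40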

View File

@ -39,19 +39,20 @@ class IDStatusCache(IOSExtraction):
return
for result in self.results:
if result["user"].startswith("mailto:"):
if result.get("user", "").startswith("mailto:"):
email = result["user"][7:].strip("'")
if self.indicators.check_email(email):
self.detected.append(result)
continue
if "\\x00\\x00" in result["user"]:
if "\\x00\\x00" in result.get("user", ""):
self.log.warning("Found an ID Status Cache entry with suspicious patterns: %s",
result["user"])
result.get("user"))
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS, root_paths=IDSTATUSCACHE_ROOT_PATHS)
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS,
root_paths=IDSTATUSCACHE_ROOT_PATHS)
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
with open(self.file_path, "rb") as handle:
@ -78,7 +79,7 @@ class IDStatusCache(IOSExtraction):
entry_counter = collections.Counter([entry["user"] for entry in id_status_cache_entries])
for entry in id_status_cache_entries:
# Add total count of occurrences to the status cache entry
# Add total count of occurrences to the status cache entry.
entry["occurrences"] = entry_counter[entry["user"]]
self.results.append(entry)

View File

@ -116,55 +116,56 @@ class InteractionC(IOSExtraction):
ZINTERACTIONS.ZGROUPNAME,
ZINTERACTIONS.ZDERIVEDINTENTIDENTIFIER,
ZINTERACTIONS.Z_PK
FROM ZINTERACTIONS
LEFT JOIN ZCONTACTS ON ZINTERACTIONS.ZSENDER = ZCONTACTS.Z_PK
LEFT JOIN Z_1INTERACTIONS ON ZINTERACTIONS.Z_PK == Z_1INTERACTIONS.Z_3INTERACTIONS
LEFT JOIN ZATTACHMENT ON Z_1INTERACTIONS.Z_1ATTACHMENTS == ZATTACHMENT.Z_PK
LEFT JOIN Z_2INTERACTIONRECIPIENT ON ZINTERACTIONS.Z_PK== Z_2INTERACTIONRECIPIENT.Z_3INTERACTIONRECIPIENT
LEFT JOIN ZCONTACTS RECEIPIENTCONACT ON Z_2INTERACTIONRECIPIENT.Z_2RECIPIENTS== RECEIPIENTCONACT.Z_PK;
FROM ZINTERACTIONS
LEFT JOIN ZCONTACTS ON ZINTERACTIONS.ZSENDER = ZCONTACTS.Z_PK
LEFT JOIN Z_1INTERACTIONS ON ZINTERACTIONS.Z_PK = Z_1INTERACTIONS.Z_3INTERACTIONS
LEFT JOIN ZATTACHMENT ON Z_1INTERACTIONS.Z_1ATTACHMENTS = ZATTACHMENT.Z_PK
LEFT JOIN Z_2INTERACTIONRECIPIENT ON ZINTERACTIONS.Z_PK = Z_2INTERACTIONRECIPIENT.Z_3INTERACTIONRECIPIENT
LEFT JOIN ZCONTACTS RECIPIENTCONTACT ON Z_2INTERACTIONRECIPIENT.Z_2RECIPIENTS = RECIPIENTCONTACT.Z_PK;
""")
names = [description[0] for description in cur.description]
for item in cur:
for row in cur:
self.results.append({
"start_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[0])),
"end_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[1])),
"bundle_id": item[2],
"account": item[3],
"target_bundle_id": item[4],
"direction": item[5],
"sender_display_name": item[6],
"sender_identifier": item[7],
"sender_personid": item[8],
"recipient_display_name": item[9],
"recipient_identifier": item[10],
"recipient_personid": item[11],
"recipient_count": item[12],
"domain_identifier": item[13],
"is_response": item[14],
"content": item[15],
"uti": item[16],
"content_url": item[17],
"size": item[18],
"photo_local_id": item[19],
"attachment_id": item[20],
"cloud_id": item[21],
"incoming_recipient_count": item[22],
"incoming_sender_count": item[23],
"outgoing_recipient_count": item[24],
"interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[25])) if item[25] else None,
"contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[26])) if item[26] else None,
"first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[27])) if item[27] else None,
"first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[28])) if item[28] else None,
"first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[29])) if item[29] else None,
"last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[30])) if item[30] else None,
"last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[31])) if item[31] else None,
"last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(item[32])) if item[32] else None,
"custom_id": item[33],
"location_uuid": item[35],
"group_name": item[36],
"derivied_intent_id": item[37],
"table_id": item[38]
"start_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
"end_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[1])),
"bundle_id": row[2],
"account": row[3],
"target_bundle_id": row[4],
"direction": row[5],
"sender_display_name": row[6],
"sender_identifier": row[7],
"sender_personid": row[8],
"recipient_display_name": row[9],
"recipient_identifier": row[10],
"recipient_personid": row[11],
"recipient_count": row[12],
"domain_identifier": row[13],
"is_response": row[14],
"content": row[15],
"uti": row[16],
"content_url": row[17],
"size": row[18],
"photo_local_id": row[19],
"attachment_id": row[20],
"cloud_id": row[21],
"incoming_recipient_count": row[22],
"incoming_sender_count": row[23],
"outgoing_recipient_count": row[24],
"interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[25])) if row[25] else None,
"contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[26])) if row[26] else None,
"first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[27])) if row[27] else None,
"first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[28])) if row[28] else None,
"first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[29])) if row[29] else None,
"last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[30])) if row[30] else None,
"last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[31])) if row[31] else None,
"last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[32])) if row[32] else None,
"custom_id": row[33],
"location_uuid": row[35],
"group_name": row[36],
"derivied_intent_id": row[37],
"table_id": row[38]
})
cur.close()

View File

@ -24,6 +24,7 @@ class LocationdClients(IOSExtraction):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
log=log, results=results)
self.timestamps = [
"ConsumptionPeriodBegin",
"ReceivingLocationInformationTimeStopped",
@ -50,7 +51,8 @@ class LocationdClients(IOSExtraction):
return records
def run(self):
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS, root_paths=LOCATIOND_ROOT_PATHS)
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS,
root_paths=LOCATIOND_ROOT_PATHS)
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
with open(self.file_path, "rb") as handle:

View File

@ -23,7 +23,8 @@ class Datausage(NetBase):
log=log, results=results)
def run(self):
self._find_ios_database(backup_ids=DATAUSAGE_BACKUP_IDS, root_paths=DATAUSAGE_ROOT_PATHS)
self._find_ios_database(backup_ids=DATAUSAGE_BACKUP_IDS,
root_paths=DATAUSAGE_ROOT_PATHS)
self.log.info("Found DataUsage database at path: %s", self.file_path)
self._extract_net_data()

View File

@ -4,6 +4,7 @@
# https://license.mvt.re/1.1/
import io
import os
import plistlib
import sqlite3
@ -15,6 +16,7 @@ from ..base import IOSExtraction
SAFARI_BROWSER_STATE_BACKUP_IDS = [
"3a47b0981ed7c10f3e2800aa66bac96a3b5db28e",
]
SAFARI_BROWSER_STATE_BACKUP_RELPATH = "Library/Safari/BrowserState.db"
SAFARI_BROWSER_STATE_ROOT_PATHS = [
"private/var/mobile/Library/Safari/BrowserState.db",
"private/var/mobile/Containers/Data/Application/*/Library/Safari/BrowserState.db",
@ -29,6 +31,8 @@ class SafariBrowserState(IOSExtraction):
output_folder=output_folder, fast_mode=fast_mode,
log=log, results=results)
self._session_history_count = 0
def serialize(self, record):
return {
"timestamp": record["last_viewed_timestamp"],
@ -53,16 +57,12 @@ class SafariBrowserState(IOSExtraction):
if "entry_url" in session_entry and self.indicators.check_domain(session_entry["entry_url"]):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
self.log.info("Found Safari browser state database at path: %s", self.file_path)
def _process_browser_state_db(self, db_path):
conn = sqlite3.connect(db_path)
conn = sqlite3.connect(self.file_path)
# Fetch valid icon cache.
cur = conn.cursor()
cur.execute("""SELECT
cur.execute("""
SELECT
tabs.title,
tabs.url,
tabs.user_visible_url,
@ -70,34 +70,43 @@ class SafariBrowserState(IOSExtraction):
tab_sessions.session_data
FROM tabs
JOIN tab_sessions ON tabs.uuid = tab_sessions.tab_uuid
ORDER BY tabs.last_viewed_time;""")
ORDER BY tabs.last_viewed_time;
""")
session_history_count = 0
for item in cur:
for row in cur:
session_entries = []
if item[4]:
if row[4]:
# Skip a 4 byte header before the plist content.
session_plist = item[4][4:]
session_plist = row[4][4:]
session_data = plistlib.load(io.BytesIO(session_plist))
session_data = keys_bytes_to_string(session_data)
if "SessionHistoryEntries" in session_data["SessionHistory"]:
for session_entry in session_data["SessionHistory"]["SessionHistoryEntries"]:
session_history_count += 1
if "SessionHistoryEntries" in session_data.get("SessionHistory", {}):
for session_entry in session_data["SessionHistory"].get("SessionHistoryEntries"):
self._session_history_count += 1
session_entries.append({
"entry_title": session_entry["SessionHistoryEntryOriginalURL"],
"entry_url": session_entry["SessionHistoryEntryURL"],
"data_length": len(session_entry["SessionHistoryEntryData"]) if "SessionHistoryEntryData" in session_entry else 0,
"entry_title": session_entry.get("SessionHistoryEntryOriginalURL"),
"entry_url": session_entry.get("SessionHistoryEntryURL"),
"data_length": len(session_entry.get("SessionHistoryEntryData")) if "SessionHistoryEntryData" in session_entry else 0,
})
self.results.append({
"tab_title": item[0],
"tab_url": item[1],
"tab_visible_url": item[2],
"last_viewed_timestamp": convert_timestamp_to_iso(convert_mactime_to_unix(item[3])),
"tab_title": row[0],
"tab_url": row[1],
"tab_visible_url": row[2],
"last_viewed_timestamp": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
"session_data": session_entries,
"safari_browser_state_db": os.path.relpath(db_path, self.base_folder),
})
def run(self):
# TODO: Is there really only one BrowserState.db in a device?
self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
self.log.info("Extracted a total of %d tab records and %d session history entries",
len(self.results), session_history_count)
len(self.results), self._session_history_count)
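
The session handling above slices off a 4-byte header before handing the blob to plistlib. A self-contained sketch of that parse:

    import io
    import plistlib

    def parse_session_data(blob):
        # BrowserState.db stores tab session data as a binary plist
        # preceded by a 4-byte header, hence the slice.
        return plistlib.load(io.BytesIO(blob[4:]))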

View File

@ -3,6 +3,7 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import os
import sqlite3
from mvt.common.url import URL
@ -10,10 +11,7 @@ from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
from ..base import IOSExtraction
SAFARI_HISTORY_BACKUP_IDS = [
"e74113c185fd8297e140cfcf9c99436c5cc06b57",
"1a0e7afc19d307da602ccdcece51af33afe92c53",
]
SAFARI_HISTORY_BACKUP_RELPATH = "Library/Safari/History.db"
SAFARI_HISTORY_ROOT_PATHS = [
"private/var/mobile/Library/Safari/History.db",
"private/var/mobile/Containers/Data/Application/*/Library/Safari/History.db",
@ -81,11 +79,8 @@ class SafariHistory(IOSExtraction):
if self.indicators.check_domain(result["url"]):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=SAFARI_HISTORY_BACKUP_IDS, root_paths=SAFARI_HISTORY_ROOT_PATHS)
self.log.info("Found Safari history database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
def _process_history_db(self, history_path):
conn = sqlite3.connect(history_path)
cur = conn.cursor()
cur.execute("""
SELECT
@ -100,20 +95,33 @@ class SafariHistory(IOSExtraction):
ORDER BY history_visits.visit_time;
""")
items = []
for item in cur:
items.append({
"id": item[0],
"url": item[1],
"visit_id": item[2],
"timestamp": item[3],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(item[3])),
"redirect_source": item[4],
"redirect_destination": item[5]
for row in cur:
self.results.append({
"id": row[0],
"url": row[1],
"visit_id": row[2],
"timestamp": row[3],
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[3])),
"redirect_source": row[4],
"redirect_destination": row[5],
"safari_history_db": os.path.relpath(history_path, self.base_folder),
})
cur.close()
conn.close()
self.log.info("Extracted a total of %d history items", len(items))
self.results = items
def run(self):
if self.is_backup:
for history_file in self._get_backup_files_from_manifest(relative_path=SAFARI_HISTORY_BACKUP_RELPATH):
history_path = self._get_backup_file_from_id(history_file["file_id"])
if not history_path:
continue
self.log.info("Found Safari history database at path: %s", history_path)
self._process_history_db(history_path)
elif self.is_fs_dump:
for history_path in self._get_fs_files_from_patterns(SAFARI_HISTORY_ROOT_PATHS):
self.log.info("Found Safari history database at path: %s", history_path)
self._process_history_db(history_path)
self.log.info("Extracted a total of %d history records", len(self.results))

View File

@ -41,15 +41,13 @@ class SMS(IOSExtraction):
return
for message in self.results:
if not "text" in message:
continue
message_links = check_for_links(message["text"])
message_links = check_for_links(message.get("text", ""))
if self.indicators.check_domains(message_links):
self.detected.append(message)
def run(self):
self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS)
self._find_ios_database(backup_ids=SMS_BACKUP_IDS,
root_paths=SMS_ROOT_PATHS)
self.log.info("Found SMS database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
@ -78,17 +76,17 @@ class SMS(IOSExtraction):
# We convert Mac's ridiculous timestamp format.
message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(message["date"]))
message["direction"] = ("sent" if message["is_from_me"] == 1 else "received")
message["direction"] = ("sent" if message.get("is_from_me", 0) == 1 else "received")
# Sometimes "text" is None instead of empty string.
if message["text"] is None:
if not message.get("text", None):
message["text"] = ""
# Extract links from the SMS message.
message_links = check_for_links(message["text"])
message_links = check_for_links(message.get("text", ""))
# If we find links in the message, or if the message is empty, we add it to the list.
if message_links or message["text"].strip() == "":
if message_links or message.get("text", "").strip() == "":
self.results.append(message)
cur.close()
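
check_for_links is imported from mvt.common.url; its implementation isn't shown in this diff, but an illustrative stand-in that extracts http(s) URLs from message text could look like:

    import re

    def check_for_links(text):
        # Illustrative stand-in: return all http(s) URLs found in the text.
        return re.findall(r"https?://\S+", text or "")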

View File

@ -36,7 +36,8 @@ class SMSAttachments(IOSExtraction):
}
def run(self):
self._find_ios_database(backup_ids=SMS_BACKUP_IDS, root_paths=SMS_ROOT_PATHS)
self._find_ios_database(backup_ids=SMS_BACKUP_IDS,
root_paths=SMS_ROOT_PATHS)
self.log.info("Found SMS database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
@ -50,19 +51,20 @@ class SMSAttachments(IOSExtraction):
FROM attachment
LEFT JOIN message_attachment_join ON message_attachment_join.attachment_id = attachment.ROWID
LEFT JOIN message ON message.ROWID = message_attachment_join.message_id
LEFT JOIN handle ON handle.ROWID = message.handle_id
LEFT JOIN handle ON handle.ROWID = message.handle_id;
""")
names = [description[0] for description in cur.description]
for item in cur:
attachment = {}
for index, value in enumerate(item):
if (names[index] in ["user_info", "sticker_user_info", "attribution_info",
"ck_server_change_token_blob", "sr_ck_server_change_token_blob"]) and value:
if (names[index] in ["user_info", "sticker_user_info",
"attribution_info",
"ck_server_change_token_blob",
"sr_ck_server_change_token_blob"]) and value:
value = b64encode(value).decode()
attachment[names[index]] = value
# We convert Mac's ridiculous timestamp format.
attachment["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["created_date"]))
attachment["start_date"] = convert_timestamp_to_iso(convert_mactime_to_unix(attachment["start_date"]))
attachment["direction"] = ("sent" if attachment["is_outgoing"] == 1 else "received")

View File

@ -46,8 +46,7 @@ class WebkitResourceLoadStatistics(IOSExtraction):
def _process_observations_db(self, db_path, key):
self.log.info("Found WebKit ResourceLoadStatistics observations.db file at path %s", db_path)
if self._is_database_malformed(db_path):
self._recover_database(db_path)
self._recover_sqlite_db_if_needed(db_path)
conn = sqlite3.connect(db_path)
cur = conn.cursor()
@ -66,7 +65,6 @@ class WebkitResourceLoadStatistics(IOSExtraction):
"registrable_domain": row[1],
"last_seen": row[2],
"had_user_interaction": bool(row[3]),
# TODO: Fix isodate.
"last_seen_isodate": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(row[2]))),
})
@ -83,5 +81,5 @@ class WebkitResourceLoadStatistics(IOSExtraction):
except Exception as e:
self.log.info("Unable to search for WebKit observations.db: %s", e)
elif self.is_fs_dump:
for db_path in self._get_fs_files_from_pattern(WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS):
for db_path in self._get_fs_files_from_patterns(WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS):
self._process_observations_db(db_path=db_path, key=os.path.relpath(db_path, self.base_folder))

View File

@ -14,6 +14,7 @@ from ..base import IOSExtraction
WEBKIT_SESSION_RESOURCE_LOG_BACKUP_IDS = [
"a500ee38053454a02e990957be8a251935e28d3f",
]
WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH = "Library/WebKit/WebsiteData/ResourceLoadStatistics/full_browsing_session_resourceLog.plist"
WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS = [
"private/var/mobile/Containers/Data/Application/*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/full_browsing_session_resourceLog.plist",
"private/var/mobile/Containers/Data/Application/*/Library/WebKit/WebsiteData/ResourceLoadStatistics/full_browsing_session_resourceLog.plist",
@ -33,31 +34,6 @@ class WebkitSessionResourceLog(IOSExtraction):
self.results = {}
def _extract_browsing_stats(self, file_path):
items = []
with open(file_path, "rb") as handle:
file_plist = plistlib.read(handle)
if "browsingStatistics" not in file_plist:
return items
browsing_stats = file_plist["browsingStatistics"]
for item in browsing_stats:
items.append({
"origin": item.get("PrevalentResourceOrigin", ""),
"redirect_source": item.get("topFrameUniqueRedirectsFrom", ""),
"redirect_destination": item.get("topFrameUniqueRedirectsTo", ""),
"subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""),
"subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""),
"user_interaction": item.get("hadUserInteraction"),
"most_recent_interaction": convert_timestamp_to_iso(item["mostRecentUserInteraction"]),
"last_seen": convert_timestamp_to_iso(item["lastSeen"]),
})
return items
@staticmethod
def _extract_domains(entries):
if not entries:
@ -111,13 +87,41 @@ class WebkitSessionResourceLog(IOSExtraction):
self.log.warning("Found HTTP redirect between suspicious domains: %s", redirect_path)
def _extract_browsing_stats(self, log_path):
items = []
with open(log_path, "rb") as handle:
file_plist = plistlib.load(handle)
if "browsingStatistics" not in file_plist:
return items
browsing_stats = file_plist["browsingStatistics"]
for item in browsing_stats:
items.append({
"origin": item.get("PrevalentResourceOrigin", ""),
"redirect_source": item.get("topFrameUniqueRedirectsFrom", ""),
"redirect_destination": item.get("topFrameUniqueRedirectsTo", ""),
"subframe_under_origin": item.get("subframeUnderTopFrameOrigins", ""),
"subresource_under_origin": item.get("subresourceUnderTopFrameOrigins", ""),
"user_interaction": item.get("hadUserInteraction"),
"most_recent_interaction": convert_timestamp_to_iso(item["mostRecentUserInteraction"]),
"last_seen": convert_timestamp_to_iso(item["lastSeen"]),
})
return items
def run(self):
if self.is_backup:
self._find_ios_database(backup_ids=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_IDS)
self.results[self.file_path] = self._extract_browsing_stats(self.file_path)
return
for log_path in self._get_backup_files_from_manifest(relative_path=WEBKIT_SESSION_RESOURCE_LOG_BACKUP_RELPATH):
self.log.info("Found Safari browsing session resource log at path: %s", log_path)
self.results[log_path] = self._extract_browsing_stats(log_path)
elif self.is_fs_dump:
for log_path in self._get_fs_files_from_patterns(WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS):
self.log.info("Found Safari browsing session resource log at path: %s", log_path)
key = os.path.relpath(log_path, self.base_folder)
self.results[key] = self._extract_browsing_stats(log_path)
for log_file in self._get_fs_files_from_pattern(WEBKIT_SESSION_RESOURCE_LOG_ROOT_PATHS):
self.log.info("Found Safari browsing session resource log at path: %s", log_file)
key = os.path.relpath(log_file, self.base_folder)
self.results[key] = self._extract_browsing_stats(log_file)
self.log.info("Extracted records from %d Safari browsing session resource logs",
len(self.results))

View File

@ -30,12 +30,12 @@ class Whatsapp(IOSExtraction):
log=log, results=results)
def serialize(self, record):
text = record["ZTEXT"].replace("\n", "\\n")
text = record.get("ZTEXT", "").replace("\n", "\\n")
return {
"timestamp": record["isodate"],
"timestamp": record.get("isodate"),
"module": self.__class__.__name__,
"event": "message",
"data": f"{text} from {record['ZFROMJID']}"
"data": f"{text} from {record.get('ZFROMJID', 'Unknown')}",
}
def check_indicators(self):
@ -43,16 +43,13 @@ class Whatsapp(IOSExtraction):
return
for message in self.results:
if not "ZTEXT" in message:
continue
message_links = check_for_links(message["ZTEXT"])
message_links = check_for_links(message.get("ZTEXT", ""))
if self.indicators.check_domains(message_links):
self.detected.append(message)
def run(self):
self._find_ios_database(backup_ids=WHATSAPP_BACKUP_IDS, root_paths=WHATSAPP_ROOT_PATHS)
self._find_ios_database(backup_ids=WHATSAPP_BACKUP_IDS,
root_paths=WHATSAPP_ROOT_PATHS)
log.info("Found WhatsApp database at path: %s", self.file_path)
conn = sqlite3.connect(self.file_path)
@ -65,11 +62,11 @@ class Whatsapp(IOSExtraction):
for index, value in enumerate(message):
new_message[names[index]] = value
if not new_message["ZTEXT"]:
if not new_message.get("ZTEXT", None):
continue
# We convert Mac's silly timestamp again.
new_message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(new_message["ZMESSAGEDATE"]))
new_message["isodate"] = convert_timestamp_to_iso(convert_mactime_to_unix(new_message.get("ZMESSAGEDATE")))
# Extract links from the WhatsApp message.
message_links = check_for_links(new_message["ZTEXT"])

View File

@ -24,7 +24,8 @@ class NetBase(IOSExtraction):
def _extract_net_data(self):
conn = sqlite3.connect(self.file_path)
cur = conn.cursor()
cur.execute("""SELECT
cur.execute("""
SELECT
ZPROCESS.ZFIRSTTIMESTAMP,
ZPROCESS.ZTIMESTAMP,
ZPROCESS.ZPROCNAME,
@ -38,43 +39,42 @@ class NetBase(IOSExtraction):
ZLIVEUSAGE.ZHASPROCESS,
ZLIVEUSAGE.ZTIMESTAMP
FROM ZLIVEUSAGE
LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK;""")
LEFT JOIN ZPROCESS ON ZLIVEUSAGE.ZHASPROCESS = ZPROCESS.Z_PK;
""")
items = []
for item in cur:
for row in cur:
# ZPROCESS records can be missing after the JOIN. Handle NULL timestamps.
if item[0] and item[1]:
first_isodate = convert_timestamp_to_iso(convert_mactime_to_unix(item[0]))
isodate = convert_timestamp_to_iso(convert_mactime_to_unix(item[1]))
if row[0] and row[1]:
first_isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0]))
isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[1]))
else:
first_isodate = item[0]
isodate = item[1]
first_isodate = row[0]
isodate = row[1]
if item[11]:
live_timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(item[11]))
if row[11]:
live_timestamp = convert_timestamp_to_iso(convert_mactime_to_unix(row[11]))
else:
live_timestamp = ""
items.append({
self.results.append({
"first_isodate": first_isodate,
"isodate": isodate,
"proc_name": item[2],
"bundle_id": item[3],
"proc_id": item[4],
"wifi_in": item[5],
"wifi_out": item[6],
"wwan_in": item[7],
"wwan_out": item[8],
"live_id": item[9],
"live_proc_id": item[10],
"proc_name": row[2],
"bundle_id": row[3],
"proc_id": row[4],
"wifi_in": row[5],
"wifi_out": row[6],
"wwan_in": row[7],
"wwan_out": row[8],
"live_id": row[9],
"live_proc_id": row[10],
"live_isodate": live_timestamp,
})
cur.close()
conn.close()
self.log.info("Extracted information on %d processes", len(items))
self.results = items
self.log.info("Extracted information on %d processes", len(self.results))
def serialize(self, record):
record_data = f"{record['proc_name']} (Bundle ID: {record['bundle_id']}, ID: {record['proc_id']})"