#!/usr/bin/env python3
"""Disk-space manager daemon.

When free space drops below MIN_BYTES / MIN_PERCENT, frees space by deleting
screen-recorder footage first (CLEARPILOT), then the oldest drive-log
segments (skipping preserved ones). When space is fine, it instead enforces
a total-size quota on /data/log2 session logs.
"""
import os
import shutil
import threading

from openpilot.system.hardware.hw import Paths
from openpilot.common.swaglog import cloudlog
from openpilot.system.loggerd.config import get_available_bytes, get_available_percent
from openpilot.system.loggerd.uploader import listdir_by_creation
from openpilot.system.loggerd.xattr_cache import getxattr

# CLEARPILOT: increased from 5 GB to 9 GB to reserve space for screen recordings
MIN_BYTES = 9 * 1024 * 1024 * 1024
MIN_PERCENT = 10

# These directories are only deleted once everything else is gone.
DELETE_LAST = ['boot', 'crash']

# CLEARPILOT: screen recorder video directory
VIDEOS_DIR = '/data/media/0/videos'

PRESERVE_ATTR_NAME = 'user.preserve'
PRESERVE_ATTR_VALUE = b'1'
PRESERVE_COUNT = 5


def has_preserve_xattr(d: str) -> bool:
  """Return True if segment directory *d* carries the 'preserve' xattr marker."""
  return getxattr(os.path.join(Paths.log_root(), d), PRESERVE_ATTR_NAME) == PRESERVE_ATTR_VALUE


def get_preserved_segments(dirs_by_creation: list[str]) -> list[str]:
  """Return up to PRESERVE_COUNT most-recent preserved segment dirs.

  For each preserved segment, its immediately-prior segment (same route,
  seg_num - 1) is also included so a preserved event keeps its lead-in.
  Non-segment directory names (no "--" separator or non-integer suffix)
  are skipped. Note: the returned names are not checked for existence.
  """
  preserved = []
  for n, d in enumerate(filter(has_preserve_xattr, reversed(dirs_by_creation))):
    if n == PRESERVE_COUNT:
      break
    date_str, _, seg_str = d.rpartition("--")

    # ignore non-segment directories
    if not date_str:
      continue
    try:
      seg_num = int(seg_str)
    except ValueError:
      continue

    # preserve segment and its prior
    preserved.append(d)
    preserved.append(f"{date_str}--{seg_num - 1}")

  return preserved


def delete_oldest_video() -> bool:
  """CLEARPILOT: delete oldest dashcam footage when disk space is low.

  Trip directories are /data/media/0/videos/YYYYMMDD-HHMMSS/ containing
  .mp4 segments. Deletes entire oldest trip directory first. If only one
  trip remains (active), deletes individual segments oldest-first within
  it. Also cleans up legacy flat .mp4 files.

  Returns True if something was deleted, False otherwise.
  """
  try:
    if not os.path.isdir(VIDEOS_DIR):
      return False

    # Collect legacy flat mp4 files and trip directories
    legacy_files = []
    trip_dirs = []
    for entry in os.listdir(VIDEOS_DIR):
      path = os.path.join(VIDEOS_DIR, entry)
      if os.path.isfile(path) and entry.endswith('.mp4'):
        legacy_files.append(entry)
      elif os.path.isdir(path):
        trip_dirs.append(entry)

    # Delete legacy flat files first (oldest by name)
    if legacy_files:
      legacy_files.sort()
      delete_path = os.path.join(VIDEOS_DIR, legacy_files[0])
      cloudlog.info(f"deleting legacy video {delete_path}")
      os.remove(delete_path)
      return True

    if not trip_dirs:
      return False

    trip_dirs.sort()  # sorted by timestamp name = chronological order

    # If more than one trip, delete the oldest entire trip directory
    if len(trip_dirs) > 1:
      delete_path = os.path.join(VIDEOS_DIR, trip_dirs[0])
      cloudlog.info(f"deleting trip {delete_path}")
      shutil.rmtree(delete_path)
      return True

    # Only one trip left (likely active) — delete oldest segment within it
    trip_path = os.path.join(VIDEOS_DIR, trip_dirs[0])
    segments = sorted(f for f in os.listdir(trip_path) if f.endswith('.mp4'))
    if not segments:
      return False
    delete_path = os.path.join(trip_path, segments[0])
    cloudlog.info(f"deleting segment {delete_path}")
    os.remove(delete_path)
    return True
  except OSError:
    cloudlog.exception(f"issue deleting video from {VIDEOS_DIR}")
    return False


# CLEARPILOT: max total size for /data/log2 session logs
LOG2_MAX_BYTES = 4 * 1024 * 1024 * 1024


def cleanup_log2() -> None:
  """Delete oldest session log directories until /data/log2 is under LOG2_MAX_BYTES.

  Session directory names are assumed to sort chronologically (timestamp
  names); the "current" entry and symlinks are never deleted. NOTE(review):
  sizes are computed from top-level files only (os.scandir is not recursive)
  — assumes session dirs are flat; confirm against the log writer.
  """
  log_base = "/data/log2"
  if not os.path.isdir(log_base):
    return

  # Get all session dirs sorted oldest first (by name = timestamp)
  dirs = []
  for entry in sorted(os.listdir(log_base)):
    if entry == "current":
      continue
    path = os.path.join(log_base, entry)
    if os.path.isdir(path) and not os.path.islink(path):
      size = sum(f.stat().st_size for f in os.scandir(path) if f.is_file())
      dirs.append((entry, path, size))

  total = sum(s for _, _, s in dirs)

  # Also count current session
  current = os.path.join(log_base, "current")
  if os.path.isdir(current):
    total += sum(f.stat().st_size for f in os.scandir(current) if f.is_file())

  # Delete oldest until under quota
  while total > LOG2_MAX_BYTES and dirs:
    entry, path, size = dirs.pop(0)
    try:
      cloudlog.info(f"deleting log session {path} ({size // 1024 // 1024} MB)")
      shutil.rmtree(path)
      total -= size
    except OSError:
      cloudlog.exception(f"issue deleting log {path}")


def deleter_thread(exit_event: threading.Event) -> None:
  """Main loop: free disk space when low, otherwise enforce the log2 quota.

  Polls every 0.1 s while space is low (one deletion per iteration) and
  every 30 s otherwise. Runs until *exit_event* is set.
  """
  while not exit_event.is_set():
    out_of_bytes = get_available_bytes(default=MIN_BYTES + 1) < MIN_BYTES
    out_of_percent = get_available_percent(default=MIN_PERCENT + 1) < MIN_PERCENT

    if out_of_percent or out_of_bytes:
      # CLEARPILOT: try deleting oldest video first, then fall back to log segments
      if delete_oldest_video():
        exit_event.wait(.1)
        continue

      dirs = listdir_by_creation(Paths.log_root())

      # skip deleting most recent N preserved segments (and their prior segment)
      preserved_dirs = get_preserved_segments(dirs)

      # remove the earliest directory we can; False sorts before True, so
      # DELETE_LAST entries and preserved segments are tried last
      for delete_dir in sorted(dirs, key=lambda d: (d in DELETE_LAST, d in preserved_dirs)):
        delete_path = os.path.join(Paths.log_root(), delete_dir)

        # BUGFIX: entries can be plain files (os.path.isfile branch below);
        # os.listdir on a file raises NotADirectoryError outside the try
        # and would kill this thread. Only scan directories for .lock files.
        if os.path.isdir(delete_path) and any(name.endswith(".lock") for name in os.listdir(delete_path)):
          continue

        try:
          cloudlog.info(f"deleting {delete_path}")
          if os.path.isfile(delete_path):
            os.remove(delete_path)
          else:
            shutil.rmtree(delete_path)
          break
        except OSError:
          cloudlog.exception(f"issue deleting {delete_path}")
      exit_event.wait(.1)
    else:
      # CLEARPILOT: enforce log2 size quota even when disk space is fine
      cleanup_log2()
      exit_event.wait(30)


def main():
  deleter_thread(threading.Event())


if __name__ == "__main__":
  main()