from indexer.config import ROOT_PATH, ROOT_NAME, DRY_RUN
from indexer.scanner import scan_files
from indexer.db import (
    get_connection,
    preload_mark_all_missing,
    find_file_by_path,
    insert_file,
    update_file,
    path_hash,
)
from indexer.events import log_event


def main():
    """Scan ROOT_PATH and sync the `files` table with what is on disk.

    For every file found: insert it (CREATED event) if its path hash is
    unknown, update it (MODIFIED event) if its size or content hash
    changed, otherwise just refresh its last_seen / exists_now flags.
    When DRY_RUN is set, nothing is written to the database — counters
    are still maintained so the run can be previewed.
    """
    print("=" * 60)
    print("ORDINACE DROPBOX BACKUP – INDEXER")
    print(f"Root : {ROOT_PATH}")
    print(f"Name : {ROOT_NAME}")
    print(f"DRY RUN : {DRY_RUN}")
    print("=" * 60)

    conn = get_connection()
    cur = conn.cursor()
    try:
        if not DRY_RUN:
            # Pessimistically mark every row missing up front; rows touched
            # below get exists_now reset to 1, so anything still flagged
            # afterwards was genuinely not seen on disk this run.
            preload_mark_all_missing()

        created = modified = seen = 0

        for file in scan_files(ROOT_PATH):
            seen += 1
            ph = path_hash(file["full_path"])
            row = find_file_by_path(cur, ph)

            if row is None:
                # Path never indexed before.
                created += 1
                if not DRY_RUN:
                    file_id = insert_file(cur, file)
                    log_event(cur, file_id, "CREATED", new=file)
            else:
                file_id, old_size, old_mtime, old_hash = row
                if old_size != file["size"] or old_hash != file["content_hash"]:
                    # Size or content hash differs: record the change.
                    modified += 1
                    if not DRY_RUN:
                        update_file(cur, file_id, file)
                        log_event(
                            cur,
                            file_id,
                            "MODIFIED",
                            old={"size": old_size, "content_hash": old_hash},
                            new=file,
                        )
                else:
                    # Unchanged file: only refresh the liveness columns.
                    if not DRY_RUN:
                        cur.execute(
                            "UPDATE files SET last_seen = NOW(), exists_now = 1 WHERE id = %s",
                            (file_id,),
                        )

            # Lightweight progress indicator for large trees.
            if seen % 500 == 0:
                print(f"{seen} files scanned...")

        if not DRY_RUN:
            conn.commit()

        print("================================")
        print(f"Scanned : {seen}")
        print(f"Created : {created}")
        print(f"Modified : {modified}")
    finally:
        # FIX: previously the connection was only closed on the happy path,
        # leaking it whenever scan_files or a DB call raised mid-run.
        # Uncommitted work is rolled back by the server on close.
        cur.close()
        conn.close()


if __name__ == "__main__":
    main()