Compare commits

..

19 Commits

Author SHA1 Message Date
aff7993093 git 2026-02-04 05:59:17 +01:00
3d11661997 git 2026-02-01 07:18:20 +01:00
7b0404bfe3 z230 2026-01-30 10:28:42 +01:00
0b7475c5c4 reporter 2026-01-20 06:18:42 +01:00
edee7cb8dd reporter 2026-01-20 06:18:29 +01:00
a990f7b394 git 2026-01-19 21:05:28 +01:00
52ae7cf60d git 2026-01-19 07:10:41 +01:00
5aac1b29c6 z230 2026-01-16 13:31:02 +01:00
c3c723e2e8 z230 2026-01-16 13:19:26 +01:00
f451317b6f z230 2026-01-16 13:10:07 +01:00
a4ede43153 z230 2026-01-13 14:43:44 +01:00
74083614e5 Remove PyCharm IDE files from repository 2026-01-11 08:18:35 +01:00
6d8ea05edb git 2026-01-11 08:15:46 +01:00
387d09b59c vbnotebook 2026-01-10 15:23:49 +01:00
a64f4b663f vbnotebook 2026-01-10 08:56:58 +01:00
84e38b01f1 vbnotebook 2026-01-09 06:38:19 +01:00
44162413e1 vbnotebook 2026-01-08 07:23:30 +01:00
1fc3323afd vbnotebook 2026-01-08 07:23:09 +01:00
8ea687b724 vbnotebook 2026-01-08 07:22:05 +01:00
25 changed files with 2622 additions and 45 deletions

3
.gitignore vendored
View File

@@ -6,8 +6,9 @@ __pycache__/
*.pyc *.pyc
*.log *.log
# IDE # IDE (PyCharm)
.idea/ .idea/
*.iml
# OS # OS
.DS_Store .DS_Store

3
.idea/.gitignore generated vendored
View File

@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml

10
.idea/Torrents.iml generated
View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (torrents)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -1,6 +0,0 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml generated
View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 (torrents)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (torrents)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/Torrents.iml" filepath="$PROJECT_DIR$/.idea/Torrents.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

123
10Library.py Normal file
View File

@@ -0,0 +1,123 @@
import os
import psycopg2
from psycopg2 import extras
from tqdm import tqdm
import time
import sys

# --- CONFIGURATION ---
# NOTE(review): plaintext DB credentials are committed here — move them to
# environment variables or a secrets store before publishing this repo.
DB_CONFIG = {
    "host": "192.168.1.76",  # IP address of the Unraid/Postgres host
    "database": "files",
    "user": "vladimir.buzalka",
    "password": "Vlado7309208104++",
    "port": "5432"
}
# Share to index (forward slashes work for //server/share UNC paths).
DIRECTORY_TO_SCAN = "//tower/Library"
# Rows buffered per INSERT; raised to 2000 for efficiency at ~5M files.
BATCH_SIZE = 2000
# --------- ----------
def scan_to_postgres():
    """Walk DIRECTORY_TO_SCAN and index every file into Postgres.

    Inserts (path, name, size) rows into ``library_files`` in batches of
    BATCH_SIZE, creating the table if needed.  Best-effort on Ctrl-C: the
    partial batch is flushed before exiting.

    Returns:
        int: number of files indexed (new, backward-compatible — was None).
    """
    conn = None
    cur = None  # guard so `finally` is safe even if connect()/cursor() fails
    total_count = 0
    files_batch = []

    # Hoisted out of the try-block so the KeyboardInterrupt handler can
    # always see it (previously an early Ctrl-C could hit a NameError).
    def save_batch(batch_data):
        """Flush one batch to the DB with a single multi-row INSERT."""
        insert_query = (
            "INSERT INTO library_files (file_path, file_name, file_size_bytes) "
            "VALUES %s"
        )
        extras.execute_values(cur, insert_query, batch_data)
        conn.commit()

    try:
        conn = psycopg2.connect(**DB_CONFIG)
        cur = conn.cursor()
        # Table initialisation (idempotent).
        cur.execute("""
            CREATE TABLE IF NOT EXISTS library_files (
                id SERIAL PRIMARY KEY,
                file_path TEXT NOT NULL,
                file_name TEXT NOT NULL,
                file_size_bytes BIGINT,
                indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );
        """)
        conn.commit()

        print(f"🚀 Zahajuji indexaci: {DIRECTORY_TO_SCAN}")
        # Progress bar with automatic unit scaling (k, M).
        pbar = tqdm(
            unit=" soubor",
            unit_scale=True,
            unit_divisor=1000,
            desc="Probíhá skenování",
            dynamic_ncols=True,
        )

        for root, _dirs, files in os.walk(DIRECTORY_TO_SCAN):
            for name in files:
                full_path = os.path.join(root, name)
                try:
                    # Metadata lookup (size) — the only call likely to fail.
                    file_size = os.path.getsize(full_path)
                except (OSError, PermissionError):
                    # Unreadable/vanished entry — skip it.
                    continue
                files_batch.append((full_path, name, file_size))
                total_count += 1
                if len(files_batch) >= BATCH_SIZE:
                    save_batch(files_batch)
                    pbar.update(len(files_batch))
                    files_batch = []

        # Flush the final (partial) batch.
        if files_batch:
            save_batch(files_batch)
            pbar.update(len(files_batch))
        pbar.close()
        print(f"\n✅ Hotovo! Celkem zaindexováno {total_count} souborů.")
    except KeyboardInterrupt:
        print("\n\n⚠️ Skenování přerušeno uživatelem. Ukládám rozpracovaná data...")
        if files_batch:
            try:
                save_batch(files_batch)
                print(f"Posledních {len(files_batch)} záznamů uloženo.")
            except Exception:
                # BUG FIX: was a bare `except:` which also swallowed
                # SystemExit/KeyboardInterrupt; keep the best-effort intent.
                print("Nepodařilo se uložit poslední dávku.")
        sys.exit(0)
    except Exception as e:
        print(f"\n❌ Chyba: {e}")
    finally:
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()
    return total_count
if __name__ == "__main__":
    start_time = time.time()
    scan_to_postgres()
    duration = time.time() - start_time
    # BUG FIX: the previous message derived "files/s" from a hard-coded
    # 5,000,000 file count, so the printed speed was fabricated for any
    # other library size.  Report only the measured duration.
    print(f"⏱️ Celkový čas: {duration / 60:.2f} minut")

View File

@@ -23,7 +23,7 @@ HEADERS = {"User-Agent": USER_AGENT}
DB_CFG = { DB_CFG = {
"host": "192.168.1.76", "host": "192.168.1.76",
"port": 3307, "port": 3307,git remote set-url origin https://gitea.buzalka.cz/administrator/torrents.git
"user": "root", "user": "root",
"password": "Vlado9674+", "password": "Vlado9674+",
"database": "torrents", "database": "torrents",

356
80 TorrentManipulation.py Normal file
View File

@@ -0,0 +1,356 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import pymysql
import qbittorrentapi
import bencodepy
from EmailMessagingGraph import send_mail

# ==============================
# ⚙ CONFIGURATION
# ==============================
# NOTE(review): plaintext credentials committed to the repo — move to env vars.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
QBT_CONFIG = {
    "host": "192.168.1.76",
    "port": 8080,
    "username": "admin",
    "password": "adminadmin",
}
# Download-slot cap (comment previously said 100; the value is now 250).
MAX_ACTIVE_DOWNLOADS = 250
# How long to wait before declaring a torrent dead: at least 3 days
# (4320 minutes).  If nobody with 100% of the file appears by then,
# the torrent is probably dead.
DEAD_TORRENT_DAYS = 3
DEAD_TORRENT_MINUTES = DEAD_TORRENT_DAYS * 24 * 60
DEFAULT_SAVE_PATH = None  # None → let qBittorrent use its configured default
MAIL_TO = "vladimir.buzalka@buzalka.cz"
MAX_LIST_ITEMS = 50  # cap lists in email

# ==============================
# 🧮 RUNTIME STATS + LISTS
# ==============================
RUN_START = datetime.now()
stat_synced = 0
stat_completed = 0
stat_dead = 0
stat_enqueued = 0
deleted_completed = []  # list[str]
deleted_dead = []  # list[str]
added_new = []  # list[str]
active_downloading = []  # list[str]

# ==============================
# 🔧 CONNECT
# ==============================
# NOTE: both connections are opened at import time (module-level side effect).
db = pymysql.connect(**DB_CONFIG)
cursor = db.cursor(pymysql.cursors.DictCursor)
qb = qbittorrentapi.Client(**QBT_CONFIG)
try:
    qb.auth_log_in()
    print("✅ Connected to qBittorrent.")
except Exception as e:
    raise SystemExit(f"❌ Could not connect to qBittorrent: {e}")
# ==============================
# 🧪 TORRENT VALIDATION
# ==============================
def is_valid_torrent(blob: bytes) -> bool:
    """Return True when *blob* bencode-decodes to a dict containing ``info``."""
    try:
        decoded = bencodepy.decode(blob)
    except Exception:
        return False
    return isinstance(decoded, dict) and b"info" in decoded
# ==============================
# 🔄 SYNC FROM QB → DB
# ==============================
def sync_qb_to_db():
    """Mirror current qBittorrent state into the `torrents` table.

    For every torrent qBittorrent reports, updates state/progress/savepath,
    stamps qb_completed_datetime only once (when it is still NULL), and
    matches rows by qb_hash OR torrent_hash.  Sets the module-level
    stat_synced counter used by the email report.
    """
    global stat_synced
    torrents = qb.torrents_info()
    stat_synced = len(torrents)
    for t in torrents:
        completion_dt = None
        if getattr(t, "completion_on", 0):
            try:
                completion_dt = datetime.fromtimestamp(t.completion_on)
            except Exception:
                pass  # bogus timestamp from the API — leave NULL
        cursor.execute("""
            UPDATE torrents
            SET qb_added = 1,
                qb_hash = COALESCE(qb_hash, %s),
                qb_state = %s,
                qb_progress = %s,
                qb_savepath = %s,
                qb_completed_datetime =
                    IF(%s IS NOT NULL AND qb_completed_datetime IS NULL, %s, qb_completed_datetime),
                qb_last_update = NOW()
            WHERE qb_hash = %s OR torrent_hash = %s
        """, (
            t.hash,
            t.state,
            float(t.progress) * 100.0,
            getattr(t, "save_path", None),
            completion_dt,
            completion_dt,
            t.hash,
            t.hash,
        ))
# ==============================
# 🧹 HANDLE COMPLETED + DEAD
# ==============================
def handle_completed_and_dead():
    """Remove finished torrents (keep data) and prune dead ones (drop data).

    Completed: progress 100% or any upload-side state — deleted from qB with
    files kept, DB row marked 'completed'.
    Dead: older than DEAD_TORRENT_MINUTES AND availability < 1.0 — deleted
    from qB including files, DB row marked 'dead'.
    """
    global stat_completed, stat_dead
    # Load torrent info
    torrents = qb.torrents_info()
    for t in torrents:
        t_hash = t.hash
        state = t.state
        progress = float(t.progress)
        # Availability — defaults to -1 when the API does not provide it
        availability = float(getattr(t, "availability", -1))
        # Added-at timestamp (fall back to "now" when missing)
        added_ts = getattr(t, "added_on", 0)
        added_dt = datetime.fromtimestamp(added_ts) if added_ts > 0 else datetime.now()
        age_in_minutes = (datetime.now() - added_dt).total_seconds() / 60
        # ---------------------------
        # 1. ✔ COMPLETED
        # ---------------------------
        if progress >= 1.0 or state in {"completed", "uploading", "stalledUP", "queuedUP"}:
            stat_completed += 1
            deleted_completed.append(t.name)
            try:
                # Remove from qB but keep the downloaded data on disk
                qb.torrents_delete(torrent_hashes=t_hash, delete_files=False)
            except Exception as e:
                print(f"⚠️ delete (keep data) failed for {t.name}: {e}")
            cursor.execute("""
                UPDATE torrents
                SET qb_state='completed',
                    qb_progress=100,
                    qb_completed_datetime=NOW(),
                    qb_last_update=NOW()
                WHERE qb_hash=%s OR torrent_hash=%s
            """, (t_hash, t_hash))
            continue
        # ---------------------------
        # 2. ❌ DEAD
        # ---------------------------
        # Logic: older than the limit AND availability < 1 (nobody in the
        # swarm has the complete file).
        # NOTE(review): no state check here, so a merely queued download can
        # also be culled once it ages out — confirm this is intended.
        is_old_enough = age_in_minutes > DEAD_TORRENT_MINUTES
        is_unavailable = availability < 1.0
        if is_old_enough and is_unavailable:
            stat_dead += 1
            deleted_dead.append(f"{t.name} (Avail: {availability:.2f})")
            try:
                # Remove from qB including the partially-downloaded files
                qb.torrents_delete(torrent_hashes=t_hash, delete_files=True)
            except Exception as e:
                print(f"⚠️ delete (files) failed for {t.name}: {e}")
            cursor.execute("""
                UPDATE torrents
                SET qb_state='dead',
                    qb_last_update=NOW()
                WHERE qb_hash=%s OR torrent_hash=%s
            """, (t_hash, t_hash))
# ==============================
# 📊 ACTIVE DOWNLOADS
# ==============================
def count_active_downloads():
    """Count torrents that are not finished yet (progress < 100%)."""
    unfinished = [t for t in qb.torrents_info() if float(t.progress) < 1.0]
    return len(unfinished)
def snapshot_active_downloading():
    """
    Capture current actively downloading torrents (progress < 100%).

    Returns a sorted list of display strings (name, percent, availability).
    """
    active = []
    for t in qb.torrents_info():
        prog = float(t.progress)
        avail = float(getattr(t, "availability", 0))
        if prog < 1.0:
            # NOTE(review): there is no separator between the name and the
            # percentage in this f-string — possibly lost in a paste;
            # confirm the intended format.
            active.append(f"{t.name}{prog * 100:.1f}% — Avail:{avail:.2f}")
    return sorted(active)
# ==============================
# ENQUEUE NEW TORRENTS
# ==============================
def enqueue_new_torrents():
    """Top up qBittorrent with queued DB torrents until the slot cap is hit.

    Each stored .torrent blob is validated first; invalid blobs are marked
    'invalid' and their content cleared.  Successfully added rows are
    marked qb_added=1 / qb_state='added'.
    """
    global stat_enqueued
    active = count_active_downloads()
    # Already at capacity — add nothing
    if active >= MAX_ACTIVE_DOWNLOADS:
        return
    # How many slots remain
    slots = MAX_ACTIVE_DOWNLOADS - active
    cursor.execute("""
        SELECT id, torrent_hash, torrent_content, torrent_filename
        FROM torrents
        WHERE (qb_added IS NULL OR qb_added = 0)
          AND torrent_content IS NOT NULL
          AND (qb_state IS NULL OR qb_state != 'dead')
        ORDER BY added_datetime DESC
        LIMIT %s
    """, (slots,))
    rows = cursor.fetchall()
    for row in rows:
        blob = row["torrent_content"]
        if not blob:
            continue
        if not is_valid_torrent(blob):
            cursor.execute("""
                UPDATE torrents
                SET qb_state='invalid',
                    torrent_content=NULL,
                    qb_last_update=NOW()
                WHERE id=%s
            """, (row["id"],))
            continue
        # Add torrent
        try:
            qb.torrents_add(torrent_files=blob, savepath=DEFAULT_SAVE_PATH)
        except Exception as e:
            print(f"❌ Failed to add {row['torrent_hash']}: {e}")
            continue
        stat_enqueued += 1
        added_new.append(row.get("torrent_filename") or row["torrent_hash"])
        cursor.execute("""
            UPDATE torrents
            SET qb_added=1,
                qb_hash=COALESCE(qb_hash, %s),
                qb_state='added',
                qb_last_update=NOW()
            WHERE id=%s
        """, (row["torrent_hash"], row["id"]))
# ==============================
# ✉️ EMAIL HELPERS
# ==============================
def format_list(title: str, items: list[str]) -> list[str]:
lines = []
if not items:
return [f"{title}: (none)"]
lines.append(f"{title}: {len(items)}")
shown = items[:MAX_LIST_ITEMS]
for it in shown:
lines.append(f" - {it}")
if len(items) > MAX_LIST_ITEMS:
lines.append(f" ... (+{len(items) - MAX_LIST_ITEMS} more)")
return lines
# ==============================
# 🏁 MAIN (ONE RUN)
# ==============================
# One-shot worker: sync state, prune, enqueue, then report by email.
print("🚀 QB worker run started")
try:
    sync_qb_to_db()
    handle_completed_and_dead()
    enqueue_new_torrents()
    # Snapshot after enqueue/deletions, so email reflects end-state
    active_downloading = snapshot_active_downloading()
finally:
    db.close()
# ==============================
# 📧 EMAIL REPORT
# ==============================
RUN_END = datetime.now()
body_lines = [
    f"Run started : {RUN_START:%Y-%m-%d %H:%M:%S}",
    f"Run finished: {RUN_END:%Y-%m-%d %H:%M:%S}",
    "",
    f"QB torrents synced : {stat_synced}",
    f"Completed removed : {stat_completed}",
    f"Dead removed : {stat_dead}",
    f"New torrents added : {stat_enqueued}",
    f"Active downloads : {len(active_downloading)} (Max: {MAX_ACTIVE_DOWNLOADS})",
    "",
]
body_lines += format_list("Deleted (completed, kept data)", deleted_completed)
body_lines.append("")
body_lines += format_list("Deleted (DEAD > 3 days & Avail < 1.0)", deleted_dead)
body_lines.append("")
body_lines += format_list("Newly added to qBittorrent", added_new)
body_lines.append("")
body_lines += format_list("Actively downloading now", active_downloading)
send_mail(
    to=MAIL_TO,
    subject=f"qBittorrent worker {RUN_START:%Y-%m-%d %H:%M}",
    body="\n".join(body_lines),
    html=False,
)
print("📧 Email report sent")
print("🎉 DONE")

362
81 TorrentManipulation.py Normal file
View File

@@ -0,0 +1,362 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import pymysql
import qbittorrentapi
import bencodepy
from EmailMessagingGraph import send_mail

# ==============================
# ⚙ CONFIGURATION
# ==============================
# NOTE(review): plaintext credentials committed to the repo — move to env vars.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
QBT_CONFIG = {
    "host": "192.168.1.76",
    "port": 8080,
    "username": "admin",
    "password": "adminadmin",
}
# Download-slot cap (comment previously said 100; the value is now 250).
MAX_ACTIVE_DOWNLOADS = 250
# How long to wait before declaring a torrent dead: at least 3 days
# (4320 minutes).  If nobody with 100% of the file appears by then,
# the torrent is probably dead.
DEAD_TORRENT_DAYS = 3
DEAD_TORRENT_MINUTES = DEAD_TORRENT_DAYS * 24 * 60
DEFAULT_SAVE_PATH = None  # None → let qBittorrent use its configured default
MAIL_TO = "vladimir.buzalka@buzalka.cz"
MAX_LIST_ITEMS = 50  # cap lists in email

# ==============================
# 🧮 RUNTIME STATS + LISTS
# ==============================
RUN_START = datetime.now()
stat_synced = 0
stat_completed = 0
stat_dead = 0
stat_enqueued = 0
deleted_completed = []  # list[str]
deleted_dead = []  # list[str]
added_new = []  # list[str]
active_downloading = []  # list[str]

# ==============================
# 🔧 CONNECT
# ==============================
# NOTE: both connections are opened at import time (module-level side effect).
db = pymysql.connect(**DB_CONFIG)
cursor = db.cursor(pymysql.cursors.DictCursor)
qb = qbittorrentapi.Client(**QBT_CONFIG)
try:
    qb.auth_log_in()
    print("✅ Connected to qBittorrent.")
except Exception as e:
    raise SystemExit(f"❌ Could not connect to qBittorrent: {e}")
# ==============================
# 🧪 TORRENT VALIDATION
# ==============================
def is_valid_torrent(blob: bytes) -> bool:
    """Return True when *blob* bencode-decodes to a dict containing ``info``."""
    try:
        decoded = bencodepy.decode(blob)
    except Exception:
        return False
    return isinstance(decoded, dict) and b"info" in decoded
# ==============================
# 🔄 SYNC FROM QB → DB
# ==============================
def sync_qb_to_db():
    """Mirror current qBittorrent state into the `torrents` table.

    Updates state/progress/savepath per torrent, stamps
    qb_completed_datetime only once (while still NULL), and matches rows
    by qb_hash OR torrent_hash.  Sets the module-level stat_synced counter.

    NOTE(review): ``limit=1000`` caps the sync at 1000 torrents per run —
    confirm the instance never exceeds that.
    """
    global stat_synced
    torrents = qb.torrents_info(limit=1000)
    stat_synced = len(torrents)
    for t in torrents:
        completion_dt = None
        if getattr(t, "completion_on", 0):
            try:
                completion_dt = datetime.fromtimestamp(t.completion_on)
            except Exception:
                pass  # bogus timestamp from the API — leave NULL
        cursor.execute("""
            UPDATE torrents
            SET qb_added = 1,
                qb_hash = COALESCE(qb_hash, %s),
                qb_state = %s,
                qb_progress = %s,
                qb_savepath = %s,
                qb_completed_datetime =
                    IF(%s IS NOT NULL AND qb_completed_datetime IS NULL, %s, qb_completed_datetime),
                qb_last_update = NOW()
            WHERE qb_hash = %s OR torrent_hash = %s
        """, (
            t.hash,
            t.state,
            float(t.progress) * 100.0,
            getattr(t, "save_path", None),
            completion_dt,
            completion_dt,
            t.hash,
            t.hash,
        ))
# ==============================
# 🧹 HANDLE COMPLETED + DEAD
# ==============================
def handle_completed_and_dead():
    """Remove finished torrents (keep data) and prune dead ones (drop data).

    Completed: progress 100% or any upload-side state — deleted from qB with
    files kept, DB row marked 'completed'.
    Dead: older than DEAD_TORRENT_MINUTES AND availability < 1.0 AND state
    is exactly 'stalledDL' — deleted from qB including files, row marked
    'dead'.
    """
    global stat_completed, stat_dead
    # Load torrent info
    torrents = qb.torrents_info(limit=1000)
    for t in torrents:
        t_hash = t.hash
        state = t.state
        progress = float(t.progress)
        # Availability — defaults to -1 when the API does not provide it
        availability = float(getattr(t, "availability", -1))
        # Added-at timestamp (fall back to "now" when missing)
        added_ts = getattr(t, "added_on", 0)
        added_dt = datetime.fromtimestamp(added_ts) if added_ts > 0 else datetime.now()
        age_in_minutes = (datetime.now() - added_dt).total_seconds() / 60
        # ---------------------------
        # 1. ✔ COMPLETED
        # ---------------------------
        if progress >= 1.0 or state in {"completed", "uploading", "stalledUP", "queuedUP"}:
            stat_completed += 1
            deleted_completed.append(t.name)
            try:
                # Remove from qB but keep the downloaded data on disk
                qb.torrents_delete(torrent_hashes=t_hash, delete_files=False)
            except Exception as e:
                print(f"⚠️ delete (keep data) failed for {t.name}: {e}")
            cursor.execute("""
                UPDATE torrents
                SET qb_state='completed',
                    qb_progress=100,
                    qb_completed_datetime=NOW(),
                    qb_last_update=NOW()
                WHERE qb_hash=%s OR torrent_hash=%s
            """, (t_hash, t_hash))
            continue
        # ---------------------------
        # 2. ❌ DEAD
        # ---------------------------
        # LOGIC:
        # A) older than the limit (3 days)
        # B) availability < 1.0 (nobody has the complete file)
        # C) state is EXACTLY "stalledDL" (a stuck download)
        # This deliberately spares "queuedDL" (waiting) and "downloading".
        is_old_enough = age_in_minutes > DEAD_TORRENT_MINUTES
        is_unavailable = availability < 1.0
        is_stalled = (state == "stalledDL")
        if is_old_enough and is_unavailable and is_stalled:
            stat_dead += 1
            deleted_dead.append(f"{t.name} (Avail: {availability:.2f}, State: {state})")
            try:
                # Remove from qB including the partially-downloaded files
                qb.torrents_delete(torrent_hashes=t_hash, delete_files=True)
            except Exception as e:
                print(f"⚠️ delete (files) failed for {t.name}: {e}")
            cursor.execute("""
                UPDATE torrents
                SET qb_state='dead',
                    qb_last_update=NOW()
                WHERE qb_hash=%s OR torrent_hash=%s
            """, (t_hash, t_hash))
# ==============================
# 📊 ACTIVE DOWNLOADS
# ==============================
def count_active_downloads():
    """Count torrents that are not finished yet (progress < 100%)."""
    unfinished = [t for t in qb.torrents_info(limit=1000) if float(t.progress) < 1.0]
    return len(unfinished)
def snapshot_active_downloading():
    """Return a sorted list of torrents still downloading (progress < 100%)."""
    snapshot = []
    for torrent in qb.torrents_info(limit=1000):
        pct = float(torrent.progress)
        if pct >= 1.0:
            continue
        availability = float(getattr(torrent, "availability", 0))
        # Include the state so the email shows whether it is queued/stalled.
        snapshot.append(
            f"{torrent.name}{pct * 100:.1f}% — Avail:{availability:.2f} — [{torrent.state}]"
        )
    return sorted(snapshot)
# ==============================
# ENQUEUE NEW TORRENTS
# ==============================
def enqueue_new_torrents():
    """Top up qBittorrent with queued DB torrents until the slot cap is hit.

    Each stored .torrent blob is validated first; invalid blobs are marked
    'invalid' and their content cleared.  Successfully added rows are
    marked qb_added=1 / qb_state='added'.
    """
    global stat_enqueued
    active = count_active_downloads()
    # Already at capacity — add nothing
    if active >= MAX_ACTIVE_DOWNLOADS:
        return
    # How many slots remain
    slots = MAX_ACTIVE_DOWNLOADS - active
    cursor.execute("""
        SELECT id, torrent_hash, torrent_content, torrent_filename
        FROM torrents
        WHERE (qb_added IS NULL OR qb_added = 0)
          AND torrent_content IS NOT NULL
          AND (qb_state IS NULL OR qb_state != 'dead')
        ORDER BY added_datetime DESC
        LIMIT %s
    """, (slots,))
    rows = cursor.fetchall()
    for row in rows:
        blob = row["torrent_content"]
        if not blob:
            continue
        if not is_valid_torrent(blob):
            cursor.execute("""
                UPDATE torrents
                SET qb_state='invalid',
                    torrent_content=NULL,
                    qb_last_update=NOW()
                WHERE id=%s
            """, (row["id"],))
            continue
        # Add torrent
        try:
            qb.torrents_add(torrent_files=blob, savepath=DEFAULT_SAVE_PATH)
        except Exception as e:
            print(f"❌ Failed to add {row['torrent_hash']}: {e}")
            continue
        stat_enqueued += 1
        added_new.append(row.get("torrent_filename") or row["torrent_hash"])
        cursor.execute("""
            UPDATE torrents
            SET qb_added=1,
                qb_hash=COALESCE(qb_hash, %s),
                qb_state='added',
                qb_last_update=NOW()
            WHERE id=%s
        """, (row["torrent_hash"], row["id"]))
# ==============================
# ✉️ EMAIL HELPERS
# ==============================
def format_list(title: str, items: list[str]) -> list[str]:
lines = []
if not items:
return [f"{title}: (none)"]
lines.append(f"{title}: {len(items)}")
shown = items[:MAX_LIST_ITEMS]
for it in shown:
lines.append(f" - {it}")
if len(items) > MAX_LIST_ITEMS:
lines.append(f" ... (+{len(items) - MAX_LIST_ITEMS} more)")
return lines
# ==============================
# 🏁 MAIN (ONE RUN)
# ==============================
# One-shot worker: sync state, prune, enqueue, then report by email.
print("🚀 QB worker run started")
try:
    sync_qb_to_db()
    handle_completed_and_dead()
    enqueue_new_torrents()
    # Snapshot after enqueue/deletions, so email reflects end-state
    active_downloading = snapshot_active_downloading()
finally:
    db.close()
# ==============================
# 📧 EMAIL REPORT
# ==============================
RUN_END = datetime.now()
body_lines = [
    f"Run started : {RUN_START:%Y-%m-%d %H:%M:%S}",
    f"Run finished: {RUN_END:%Y-%m-%d %H:%M:%S}",
    "",
    f"QB torrents synced : {stat_synced}",
    f"Completed removed : {stat_completed}",
    f"Dead removed : {stat_dead}",
    f"New torrents added : {stat_enqueued}",
    f"Active downloads : {len(active_downloading)} (Max: {MAX_ACTIVE_DOWNLOADS})",
    "",
]
body_lines += format_list("Deleted (completed, kept data)", deleted_completed)
body_lines.append("")
body_lines += format_list("Deleted (DEAD > 3 days & StalledDL & Avail < 1.0)", deleted_dead)
body_lines.append("")
body_lines += format_list("Newly added to qBittorrent", added_new)
body_lines.append("")
body_lines += format_list("Actively downloading now", active_downloading)
send_mail(
    to=MAIL_TO,
    subject=f"qBittorrent worker {RUN_START:%Y-%m-%d %H:%M}",
    body="\n".join(body_lines),
    html=False,
)
print("📧 Email report sent")
print("🎉 DONE")

153
82 Reporting.py Normal file
View File

@@ -0,0 +1,153 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import pandas as pd
import os
from datetime import datetime

# ==============================
# ⚙ CONFIGURATION
# ==============================
# NOTE(review): plaintext DB credentials committed — move to env vars.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4"
}
# Target folder (raw string so backslashes are taken literally).
OUTPUT_DIR = r"u:\Dropbox\!!!Days\Downloads Z230"
FILE_NAME = f"Torrents_Report_{datetime.now():%Y-%m-%d}.xlsx"
# Join folder and file name
FULL_OUTPUT_PATH = os.path.join(OUTPUT_DIR, FILE_NAME)
# ==============================
# 📥 DATA LOADING
# ==============================
def get_data():
    """Fetch the whole `torrents` table as a DataFrame, newest first.

    Columns are aliased to the Czech headers used by the Excel report.
    The DB connection is always closed, even when the query fails.
    """
    print("⏳ Připojuji se k databázi a stahuji data...")
    conn = pymysql.connect(**DB_CONFIG)
    query = """
        SELECT
            id,
            category,
            title_visible AS 'Název',
            size_pretty AS 'Velikost',
            added_datetime AS 'Přidáno do DB',
            qb_state AS 'Stav v QB',
            qb_progress AS 'Postup (%)',
            qb_savepath AS 'Cesta na disku',
            qb_completed_datetime AS 'Dokončeno',
            qb_last_update AS 'Poslední info'
        FROM torrents
        ORDER BY added_datetime DESC
    """
    try:
        # NOTE: pandas officially supports SQLAlchemy connectables; a raw
        # pymysql connection works but newer pandas emits a UserWarning.
        df = pd.read_sql(query, conn)
    finally:
        # BUG FIX: the connection used to leak when read_sql raised.
        conn.close()
    return df
# ==============================
# 🎨 EXCEL FORMATTING
# ==============================
def auto_adjust_columns(writer, df, sheet_name):
    """Safely auto-size every column of *sheet_name* to fit its content."""
    worksheet = writer.sheets[sheet_name]
    for idx, col in enumerate(df.columns):
        widths = [len(str(col))]  # at least as wide as the header
        for val in df[col]:
            if val is None or (isinstance(val, float) and pd.isna(val)):
                widths.append(0)
            else:
                widths.append(len(str(val)))
        # Pad by 2, capped at 60 characters.
        worksheet.set_column(idx, idx, min(max(widths) + 2, 60))
# ==============================
# 🚀 MAIN LOGIC
# ==============================
def generate_report():
    """Build the multi-sheet Excel report and write it to FULL_OUTPUT_PATH.

    Sheets: active/queued, completed, dead/invalid, and the full table.
    Aborts early (with a console message) when OUTPUT_DIR is unreachable.
    """
    # 1. Path check (the U: drive must be mapped)
    if not os.path.exists(OUTPUT_DIR):
        print(f"❌ CHYBA: Cílová složka neexistuje nebo není dostupná: {OUTPUT_DIR}")
        print(" Ujistěte se, že je disk U: připojen.")
        return
    df = get_data()
    print(f"✅ Načteno {len(df)} záznamů.")
    # 2. DATA CLEANUP — progress as a rounded float, NULLs as 0
    df['Postup (%)'] = df['Postup (%)'].fillna(0).astype(float).round(1)
    # 3. FILTERING
    # A) DEAD (includes invalid blobs)
    mask_dead = df['Stav v QB'].isin(['dead', 'invalid'])
    df_dead = df[mask_dead].copy()
    # B) COMPLETED (state or 100% progress, but never the dead ones)
    mask_completed = (
        (df['Stav v QB'] == 'completed') |
        (df['Postup (%)'] >= 100)
    ) & (~mask_dead)
    df_completed = df[mask_completed].copy()
    # C) ACTIVE / QUEUED (everything else)
    mask_active = (~mask_dead) & (~mask_completed)
    df_active = df[mask_active].copy()
    # Sorting for readability
    df_active = df_active.sort_values(by=['Postup (%)', 'Přidáno do DB'], ascending=[False, False])
    df_completed = df_completed.sort_values(by='Dokončeno', ascending=False)
    df_dead = df_dead.sort_values(by='Poslední info', ascending=False)
    # 4. EXPORT — one sheet per bucket plus the complete table
    print(f"💾 Ukládám do: {FULL_OUTPUT_PATH}")
    try:
        with pd.ExcelWriter(FULL_OUTPUT_PATH, engine='xlsxwriter') as writer:
            # Sheet 1: to download
            df_active.to_excel(writer, sheet_name='Ke stažení', index=False)
            auto_adjust_columns(writer, df_active, 'Ke stažení')
            # Sheet 2: done
            df_completed.to_excel(writer, sheet_name='Hotovo', index=False)
            auto_adjust_columns(writer, df_completed, 'Hotovo')
            # Sheet 3: dead
            df_dead.to_excel(writer, sheet_name='Smazáno (Dead)', index=False)
            auto_adjust_columns(writer, df_dead, 'Smazáno (Dead)')
            # Sheet 4: everything
            df.to_excel(writer, sheet_name='Kompletní DB', index=False)
            auto_adjust_columns(writer, df, 'Kompletní DB')
        print("🎉 Hotovo! Report byl úspěšně uložen na disk U:")
    except Exception as e:
        print(f"❌ Chyba při zápisu souboru: {e}")
# Script entry point.
if __name__ == "__main__":
    generate_report()

View File

@@ -0,0 +1,310 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import time
import re
import urllib.parse as urlparse
from pathlib import Path
import json
import requests
import datetime
import sys
# Ensure this file exists in your directory
from EmailMessagingGraph import send_mail

# ============================================================
# RUNTIME INFO
# ============================================================
# Counters/lists accumulated during the scrape for the final email report.
RUN_START = datetime.datetime.now()
processed_count = 0
new_torrent_count = 0
existing_torrent_count = 0
new_titles = []
print(f"🕒 Run started at {RUN_START:%Y-%m-%d %H:%M:%S}")
sys.stdout.flush()

# ============================================================
# 1) MySQL CONNECTION
# ============================================================
# NOTE(review): plaintext credentials committed — move to env vars.  This
# host (192.168.1.50:3306) differs from the other scripts' 192.168.1.76:3307
# — confirm which DB instance is authoritative.
db = pymysql.connect(
    host="192.168.1.50",
    port=3306,
    user="root",
    password="Vlado9674+",
    database="torrents",
    charset="utf8mb4",
    autocommit=True,
)
cursor = db.cursor()
# ============================================================
# 2) Selenium setup
# ============================================================
COOKIE_FILE = Path("sktorrent_cookies.json")
# Updated to standard torrents.php as requested
BASE_URL = (
    "https://sktorrent.eu/torrent/torrents.php"
    "?active=0&category=24&order=data&by=DESC&zaner=&jazyk="
)
chrome_options = Options()
chrome_options.add_argument("--start-maximized")
chrome_options.add_argument("--disable-notifications")
chrome_options.add_argument("--disable-popup-blocking")
chrome_options.add_argument("--disable-extensions")
driver = webdriver.Chrome(options=chrome_options)
driver.set_window_position(380, 50)
driver.set_window_size(1350, 1000)
# Load the site first so cookies can be attached to its domain.
driver.get("https://sktorrent.eu")
if COOKIE_FILE.exists():
    with open(COOKIE_FILE, "r", encoding="utf-8") as f:
        cookies = json.load(f)
    for c in cookies:
        driver.add_cookie(c)
    print("🍪 Cookies loaded.")
else:
    print("⚠️ Cookie file not found login may be required.")

# ============================================================
# 3) requests.Session from Selenium cookies
# ============================================================
# Mirror the browser session into requests so .torrent files can be
# downloaded without Selenium.
requests_session = requests.Session()
for ck in driver.get_cookies():
    requests_session.cookies.set(ck["name"], ck["value"])
print("🔗 Requests session initialized.")
# ============================================================
# 4) Popup handler
# ============================================================
def close_popup_if_any():
    """Best-effort dismissal of the site's interstitial popup, if present."""
    try:
        driver.execute_script("try { interstitialBox.closeit(); } catch(e) {}")
        time.sleep(0.5)
    except Exception:
        return
# ============================================================
# 5) Parse one torrent row (MODIFIED)
# ============================================================
def parse_row(cells):
    """Extract one torrent's metadata from a 7-cell listing row.

    Parameters
    ----------
    cells : list of selenium WebElement
        The <td> elements of one table row (pre-filtered by the page loop).

    Returns
    -------
    dict or None
        Keys match ``insert_sql``'s named placeholders, plus
        ``is_new_torrent``; ``None`` when the row cannot be parsed.

    Side effects: queries the ``torrents`` table through the module-level
    ``cursor`` and may download the .torrent file via ``requests_session``.

    FIX: the two bare ``except:`` clauses are narrowed (they also caught
    SystemExit/KeyboardInterrupt, making the scraper hard to interrupt).
    """
    # --- 1. INITIALIZE ---
    torrent_hash = None
    download_url = None
    category = cells[0].text.strip()
    try:
        # --- 2. EXTRACT DOWNLOAD URL (Column 1) ---
        download_a = cells[1].find_element(By.TAG_NAME, "a")
        download_url = download_a.get_attribute("href")
        parsed_dl = urlparse.urlparse(download_url)
        dl_query = urlparse.parse_qs(parsed_dl.query)
        # The "f" query parameter carries the original .torrent file name.
        torrent_filename = dl_query.get("f", ["unknown.torrent"])[0]
        # --- 3. EXTRACT DETAILS & HASH (Column 2) ---
        title_links = cells[2].find_elements(By.TAG_NAME, "a")
        if not title_links:
            return None
        a_tag = title_links[0]
        visible_name = a_tag.text.strip()
        full_title = a_tag.get_attribute("title")
        details_link = a_tag.get_attribute("href")
        parsed = urlparse.urlparse(details_link)
        query = urlparse.parse_qs(parsed.query)
        if "id" not in query:
            return None
        # The details page "id" parameter doubles as the torrent's unique hash.
        torrent_hash = query["id"][0]
        # --- 4. EXTRACT SIZE & DATE ---
        text_block = cells[2].get_attribute("innerText")
        text_block_clean = " ".join(text_block.split())
        size_match = re.search(r"Velkost ([0-9\.]+ ?[KMG]B)", text_block_clean, re.IGNORECASE)
        added_match = re.search(r"Pridany (.+?)(?:\sObrázok|$)", text_block_clean, re.IGNORECASE)
        size_pretty = size_match.group(1) if size_match else None
        added_pretty = added_match.group(1) if added_match else None
        added_mysql = None
        if added_pretty:
            # Site format is "dd/mm/yyyy o HH:MM"; convert to MySQL DATETIME.
            clean = added_pretty.replace(" o ", " ").strip()
            parts = clean.split(" ")
            if len(parts) >= 2:
                date_part, time_part = parts[0], parts[1]
                if len(time_part.split(":")) == 2:
                    time_part += ":00"  # append seconds
                try:
                    d, m, y = date_part.split("/")
                    added_mysql = f"{y}-{m}-{d} {time_part}"
                except ValueError:  # was a bare except; a bad date just stays NULL
                    pass
        # --- 5. IMAGE & STATS ---
        img_link = None
        try:
            # Preview image URL is embedded in the link's onmouseover JS.
            image_a = cells[2].find_element(By.XPATH, ".//a[contains(text(),'Obrázok')]")
            mouseover = image_a.get_attribute("onmouseover")
            img_match = re.search(r"src=([^ ]+)", mouseover)
            if img_match:
                img_link = img_match.group(1).replace("'", "").strip()
                if img_link.startswith("//"):
                    img_link = "https:" + img_link
        except Exception:  # was a bare except; the row is still usable without an image
            pass
        seeders_number = int(cells[4].find_element(By.TAG_NAME, "a").text.strip())
        seeders_link = cells[4].find_element(By.TAG_NAME, "a").get_attribute("href")
        leechers_number = int(cells[5].find_element(By.TAG_NAME, "a").text.strip())
        leechers_link = cells[5].find_element(By.TAG_NAME, "a").get_attribute("href")
        # --- 6. DATABASE CHECK & DOWNLOAD ---
        # Only fetch the .torrent file when we do not already hold its BLOB.
        cursor.execute("SELECT torrent_content FROM torrents WHERE torrent_hash=%s", (torrent_hash,))
        db_row = cursor.fetchone()
        already_have_torrent = db_row is not None and db_row[0] is not None
        torrent_content = None
        if not already_have_torrent:
            time.sleep(2)  # politeness delay before hitting the download endpoint
            try:
                resp = requests_session.get(download_url, timeout=10)
                resp.raise_for_status()
                torrent_content = resp.content
            except Exception as e:
                # Keep the metadata row even when the file download fails.
                print(f" ⚠️ Download failed for {visible_name}: {e}")
        return {
            "torrent_hash": torrent_hash,
            "details_link": details_link,
            "download_url": download_url,
            "category": category,
            "title_visible": visible_name,
            "title_full": full_title,
            "size_pretty": size_pretty,
            "added_datetime": added_mysql,
            "preview_image": img_link,
            "seeders": seeders_number,
            "seeders_link": seeders_link,
            "leechers": leechers_number,
            "leechers_link": leechers_link,
            "torrent_filename": torrent_filename,
            "torrent_content": torrent_content if not already_have_torrent else None,
            "is_new_torrent": not already_have_torrent,
        }
    except Exception as e:
        print(f"⚠️ parse_row logic failed: {e}")
        return None
# ============================================================
# 6) INSERT SQL (MODIFIED)
# ============================================================
# Upsert keyed on torrent_hash (torrent_hash must be UNIQUE/PRIMARY for
# ON DUPLICATE KEY UPDATE to trigger). Values are bound by name from the
# dict returned by parse_row().
insert_sql = """
INSERT INTO torrents (
torrent_hash, details_link, download_url, category, title_visible, title_full,
size_pretty, added_datetime, preview_image,
seeders, seeders_link, leechers, leechers_link,
torrent_filename, torrent_content
) VALUES (
%(torrent_hash)s, %(details_link)s, %(download_url)s, %(category)s, %(title_visible)s, %(title_full)s,
%(size_pretty)s, %(added_datetime)s, %(preview_image)s,
%(seeders)s, %(seeders_link)s, %(leechers)s, %(leechers_link)s,
%(torrent_filename)s, %(torrent_content)s
)
ON DUPLICATE KEY UPDATE
seeders = VALUES(seeders),
leechers = VALUES(leechers),
download_url = VALUES(download_url),
torrent_content = COALESCE(VALUES(torrent_content), torrent_content);
"""
# Note: COALESCE(VALUES(torrent_content), torrent_content)
# keeps the stored BLOB when the incoming value is NULL,
# and fills it in when the stored one was NULL and the new one is binary.
# ============================================================
# 7) PROCESS ALL PAGES
# ============================================================
TOTAL_PAGES = 226  # number of listing pages to walk (page index is 0-based)
for page_num in range(0, TOTAL_PAGES):
    current_url = f"{BASE_URL}&page={page_num}"
    print(f"\n🌐 Loading Page Index {page_num} (Page {page_num + 1}/{TOTAL_PAGES})")
    driver.get(current_url)
    time.sleep(2)  # let the page (and its ad scripts) settle
    close_popup_if_any()
    # Find table rows
    rows = driver.find_elements(By.CSS_SELECTOR, "table tr")
    # FILTER: Only keep rows that have 7 columns AND a link in the 2nd column (index 1)
    # This automatically discards headers and empty space rows.
    real_rows = []
    for r in rows:
        cells = r.find_elements(By.TAG_NAME, "td")
        if len(cells) == 7 and cells[1].find_elements(By.TAG_NAME, "a"):
            real_rows.append(cells)
    if not real_rows:
        print("⚠️ No data rows found on this page. Ending loop.")
        break
    # Per-page counter of newly seen torrents (consumed by the optional
    # early-exit heuristic left commented out below).
    page_new_items = 0
    for cells in real_rows:
        try:
            data = parse_row(cells)
        except Exception as e:
            print(f"⚠️ parse_row failed: {e}")
            continue
        if not data: continue
        # Counters (processed_count, new_torrent_count, existing_torrent_count,
        # new_titles) are initialised earlier in the script, above this chunk.
        processed_count += 1
        if data["is_new_torrent"]:
            new_torrent_count += 1
            page_new_items += 1
            new_titles.append(data["title_visible"])
            print(f"💾 NEW: {data['title_visible']}")
        else:
            existing_torrent_count += 1
            print(f"♻️ UPDATING: {data['title_visible']}")
        cursor.execute(insert_sql, data)
    # # If an entire page is old news, we can stop the deep crawl
    # if page_new_items == 0 and page_num > 0:
    #     print("🛑 Page contained only known items. Sync complete.")
    #     break
    time.sleep(1)  # politeness delay between pages
# ============================================================
# 8) SEND EMAIL REPORT
# ============================================================
# RUN_START and the run counters are initialised near the top of the script
# (outside this section).
RUN_END = datetime.datetime.now()
subject = f"SKTorrent run {RUN_START:%Y-%m-%d %H:%M}"
body = (
    f"Run started: {RUN_START:%Y-%m-%d %H:%M:%S}\n"
    f"Run finished: {RUN_END:%Y-%m-%d %H:%M:%S}\n\n"
    f"Processed torrents: {processed_count}\n"
    f"New torrents saved: {new_torrent_count}\n"
    f"Existing torrents updated: {existing_torrent_count}\n"
)
if new_titles:
    body += "\nNew torrents list:\n- " + "\n- ".join(new_titles)
send_mail(to="vladimir.buzalka@buzalka.cz", subject=subject, body=body, html=False)
print("📧 Email report sent.")
driver.quit()  # close the browser only after the whole crawl is done
print("🎉 DONE")

292
91 5threaddownloader.py Normal file
View File

@@ -0,0 +1,292 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import time
import re
import urllib.parse as urlparse
from pathlib import Path
import json
import requests
import datetime
import sys
import threading
from concurrent.futures import ThreadPoolExecutor
# Ensure this file exists in your directory
from EmailMessagingGraph import send_mail
# ============================================================
# CONFIGURATION
# ============================================================
TOTAL_PAGES = 226  # listing pages to split among the workers (0-based index)
THREADS = 5  # number of parallel browser/DB workers
COOKIE_FILE = Path("sktorrent_cookies.json")  # saved login cookies (JSON list)
# Database settings
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.50",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
BASE_URL = (
    "https://sktorrent.eu/torrent/torrents.php"
    "?active=0&category=24&order=data&by=DESC&zaner=&jazyk="
)
# Global counters for reporting (Thread-safe lock needed)
stats_lock = threading.Lock()
stats = {
    "processed": 0,
    "new": 0,
    "existing": 0,
    "new_titles": []
}
# ============================================================
# 1) WORKER FUNCTION (Runs inside each thread)
# ============================================================
def process_page_chunk(page_indices, thread_id):
    """Scrape and upsert every listing page in *page_indices*.

    Each worker owns a private headless Chrome instance, a private pymysql
    connection and a private requests.Session (shared clients are not
    thread-safe) and reports progress into the module-level ``stats`` dict,
    always under ``stats_lock``.

    Parameters
    ----------
    page_indices : list[int]
        0-based page numbers this worker is responsible for.
    thread_id : int
        Worker number, used only in log prefixes.

    FIX: the date parsing in the inner parse_row() used to run partly
    OUTSIDE its try block, so one malformed date raised ValueError into the
    outer handler and the WHOLE row was silently discarded. The parse now
    only leaves the date NULL. Bare ``except:`` clauses are narrowed too.
    """
    print(f"🧵 [Thread-{thread_id}] Starting. Assigned {len(page_indices)} pages.")
    # --- A. Setup Independent DB Connection ---
    try:
        db = pymysql.connect(**DB_CONFIG)
        cursor = db.cursor()
    except Exception as e:
        print(f"❌ [Thread-{thread_id}] DB Connection failed: {e}")
        return
    # --- B. Setup Independent Selenium Driver ---
    chrome_options = Options()
    # HEADLESS MODE is safer for 5 threads to avoid popping up 5 windows
    chrome_options.add_argument("--headless=new")
    chrome_options.add_argument("--disable-notifications")
    chrome_options.add_argument("--disable-popup-blocking")
    chrome_options.add_argument("--disable-extensions")
    chrome_options.add_argument("--log-level=3")  # Reduce noise
    driver = webdriver.Chrome(options=chrome_options)
    driver.set_window_size(1350, 1000)
    # --- C. Login / Cookies ---
    # The domain must be open before cookies can be attached.
    driver.get("https://sktorrent.eu")
    if COOKIE_FILE.exists():
        with open(COOKIE_FILE, "r", encoding="utf-8") as f:
            cookies = json.load(f)
        for c in cookies:
            driver.add_cookie(c)
    # --- D. Requests Session (used for the actual .torrent downloads) ---
    requests_session = requests.Session()
    for ck in driver.get_cookies():
        requests_session.cookies.set(ck["name"], ck["value"])

    # --- E. Helper: Parse Row (Local scope) ---
    def parse_row(cells):
        """Extract one torrent's metadata dict from a 7-cell row, or None."""
        try:
            category = cells[0].text.strip()
            # Download URL; the "f" query parameter is the .torrent file name
            download_a = cells[1].find_element(By.TAG_NAME, "a")
            download_url = download_a.get_attribute("href")
            parsed_dl = urlparse.urlparse(download_url)
            dl_query = urlparse.parse_qs(parsed_dl.query)
            torrent_filename = dl_query.get("f", ["unknown.torrent"])[0]
            # Details & Hash (the "id" query parameter doubles as the hash)
            title_links = cells[2].find_elements(By.TAG_NAME, "a")
            if not title_links: return None
            a_tag = title_links[0]
            visible_name = a_tag.text.strip()
            full_title = a_tag.get_attribute("title")
            details_link = a_tag.get_attribute("href")
            parsed = urlparse.urlparse(details_link)
            query = urlparse.parse_qs(parsed.query)
            if "id" not in query: return None
            torrent_hash = query["id"][0]
            # Size & Date ("dd/mm/yyyy o HH:MM" -> MySQL DATETIME)
            text_block = cells[2].get_attribute("innerText")
            clean_text = " ".join(text_block.split())
            size_match = re.search(r"Velkost ([0-9\.]+ ?[KMG]B)", clean_text, re.IGNORECASE)
            added_match = re.search(r"Pridany (.+?)(?:\sObrázok|$)", clean_text, re.IGNORECASE)
            size_pretty = size_match.group(1) if size_match else None
            added_mysql = None
            if added_match:
                clean = added_match.group(1).replace(" o ", " ").strip()
                parts = clean.split(" ")
                if len(parts) >= 2:
                    # FIX: split/format now inside the try so a malformed
                    # date no longer aborts the whole row via the outer
                    # handler — it just stays NULL.
                    try:
                        d, m, y = parts[0].split("/")
                        t = parts[1] + ":00" if len(parts[1].split(":")) == 2 else parts[1]
                        added_mysql = f"{y}-{m}-{d} {t}"
                    except ValueError:
                        pass
            # Image (URL hidden inside the link's onmouseover JS)
            img_link = None
            try:
                img_a = cells[2].find_element(By.XPATH, ".//a[contains(text(),'Obrázok')]")
                img_src = re.search(r"src=([^ ]+)", img_a.get_attribute("onmouseover"))
                if img_src:
                    img_link = img_src.group(1).replace("'", "").strip()
                    if img_link.startswith("//"): img_link = "https:" + img_link
            except Exception:
                pass  # the row is still usable without a preview image
            # Stats
            seeders = int(cells[4].find_element(By.TAG_NAME, "a").text.strip())
            seeders_link = cells[4].find_element(By.TAG_NAME, "a").get_attribute("href")
            leechers = int(cells[5].find_element(By.TAG_NAME, "a").text.strip())
            leechers_link = cells[5].find_element(By.TAG_NAME, "a").get_attribute("href")
            # Check DB: only download the file when we do not hold it yet
            cursor.execute("SELECT torrent_content FROM torrents WHERE torrent_hash=%s", (torrent_hash,))
            row = cursor.fetchone()
            already_have_file = row is not None and row[0] is not None
            content = None
            if not already_have_file:
                # Politeness sleep only if downloading
                time.sleep(1)
                try:
                    r = requests_session.get(download_url, timeout=10)
                    r.raise_for_status()
                    content = r.content
                except Exception:
                    pass  # keep the metadata even when the download fails
            return {
                "torrent_hash": torrent_hash, "details_link": details_link, "download_url": download_url,
                "category": category, "title_visible": visible_name, "title_full": full_title,
                "size_pretty": size_pretty, "added_datetime": added_mysql, "preview_image": img_link,
                "seeders": seeders, "seeders_link": seeders_link, "leechers": leechers, "leechers_link": leechers_link,
                "torrent_filename": torrent_filename, "torrent_content": content,
                "is_new_torrent": not already_have_file
            }
        except Exception:
            return None

    # --- F. Loop through Assigned Pages ---
    for page_num in page_indices:
        url = f"{BASE_URL}&page={page_num}"
        print(f" 🔄 [Thread-{thread_id}] Scraping Page {page_num}")
        try:
            driver.get(url)
            # Close popup (simplified JS)
            driver.execute_script("try { interstitialBox.closeit(); } catch(e) {}")
            # Row Filtering: keep only 7-cell rows with a download link
            rows = driver.find_elements(By.CSS_SELECTOR, "table tr")
            real_rows = []
            for r in rows:
                cs = r.find_elements(By.TAG_NAME, "td")
                if len(cs) == 7 and cs[1].find_elements(By.TAG_NAME, "a"):
                    real_rows.append(cs)
            if not real_rows:
                print(f" ⚠️ [Thread-{thread_id}] Page {page_num} empty.")
                continue
            # Process Rows
            for cells in real_rows:
                data = parse_row(cells)
                if not data: continue
                # Update Global Stats safely
                with stats_lock:
                    stats["processed"] += 1
                    if data["is_new_torrent"]:
                        stats["new"] += 1
                        stats["new_titles"].append(data["title_visible"])
                    else:
                        stats["existing"] += 1
                # Insert SQL (upsert keyed on torrent_hash; COALESCE keeps an
                # existing BLOB when the new content is NULL)
                sql = """
                INSERT INTO torrents (
                torrent_hash, details_link, download_url, category, title_visible, title_full,
                size_pretty, added_datetime, preview_image,
                seeders, seeders_link, leechers, leechers_link,
                torrent_filename, torrent_content
                ) VALUES (
                %(torrent_hash)s, %(details_link)s, %(download_url)s, %(category)s, %(title_visible)s, %(title_full)s,
                %(size_pretty)s, %(added_datetime)s, %(preview_image)s,
                %(seeders)s, %(seeders_link)s, %(leechers)s, %(leechers_link)s,
                %(torrent_filename)s, %(torrent_content)s
                )
                ON DUPLICATE KEY UPDATE
                seeders = VALUES(seeders),
                leechers = VALUES(leechers),
                download_url = VALUES(download_url),
                torrent_content = COALESCE(VALUES(torrent_content), torrent_content);
                """
                cursor.execute(sql, data)
        except Exception as e:
            print(f" 💥 [Thread-{thread_id}] Error on page {page_num}: {e}")
    # Cleanup
    driver.quit()
    db.close()
    print(f"🏁 [Thread-{thread_id}] Finished assigned pages.")
# ============================================================
# 2) MAIN EXECUTION
# ============================================================
if __name__ == "__main__":
    RUN_START = datetime.datetime.now()
    print(f"🚀 Starting Multithreaded Scraper with {THREADS} threads...")
    # 1. Distribute pages among threads
    # Example: If 226 pages and 5 threads, each gets ~45 pages
    all_pages = list(range(TOTAL_PAGES))
    # Ceiling-style chunking: the last chunk may be shorter (or empty).
    chunk_size = len(all_pages) // THREADS + 1
    chunks = [all_pages[i:i + chunk_size] for i in range(0, len(all_pages), chunk_size)]
    # 2. Start Threads
    with ThreadPoolExecutor(max_workers=THREADS) as executor:
        futures = []
        for i, page_chunk in enumerate(chunks):
            if page_chunk:  # Only start if chunk is not empty
                futures.append(executor.submit(process_page_chunk, page_chunk, i + 1))
        # Wait for all to finish
        for f in futures:
            f.result()  # re-raises any worker exception here
    # 3. Final Report
    RUN_END = datetime.datetime.now()
    print("\n✅ All threads completed.")
    body = (
        f"Run started: {RUN_START:%Y-%m-%d %H:%M:%S}\n"
        f"Run finished: {RUN_END:%Y-%m-%d %H:%M:%S}\n\n"
        f"Processed torrents: {stats['processed']}\n"
        f"New torrents saved: {stats['new']}\n"
        f"Existing torrents updated: {stats['existing']}\n"
    )
    if stats["new_titles"]:
        body += "\nNew torrents list:\n- " + "\n- ".join(stats["new_titles"])
    send_mail(to="vladimir.buzalka@buzalka.cz", subject=f"SKTorrent Multi-Thread Run", body=body, html=False)
    print("📧 Email report sent.")

View File

@@ -0,0 +1,212 @@
import pymysql
import requests
import json
import time
import random
import os
import re
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor
from threading import Lock
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
# ============================================================
# CONFIGURATION
# ============================================================
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.50",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
COOKIE_FILE = Path("sktorrent_cookies.json")  # saved login cookies (JSON list)
BACKUP_DIR = "saved_torrents"  # directory for the local .torrent backup
THREADS = 5  # number of worker threads
# Global lock for console output so prints from threads do not interleave
print_lock = Lock()
# Shared counters; NOTE(review): mutated from worker threads — should be
# updated under a lock (see worker_task).
stats = {"fixed": 0, "failed": 0, "saved_to_disk": 0}
# ============================================================
# HELPER FUNCTIONS
# ============================================================
def sanitize_filename(name):
    """Strip characters that are unsafe in a file name.

    Keeps word characters, whitespace, dots and dashes, trims surrounding
    whitespace, and caps the result at 100 characters.
    """
    allowed_only = re.sub(r'[^\w\s\.-]', '', name)
    trimmed = allowed_only.strip()
    return trimmed[:100]
def ensure_backup_dir():
    """Create the local backup directory if it does not exist yet.

    FIX: uses ``os.makedirs(..., exist_ok=True)`` so a directory created
    between the existence check and the call (e.g. by a concurrent run)
    cannot raise FileExistsError — the original exists()+makedirs() pair
    was racy.
    """
    if not os.path.exists(BACKUP_DIR):
        os.makedirs(BACKUP_DIR, exist_ok=True)
        print(f"📁 Vytvořen adresář pro zálohu: {os.path.abspath(BACKUP_DIR)}")
def get_browser_identity():
    """Launch a throwaway headless Chrome once and harvest its identity.

    Returns a ``(user_agent, cookies)`` tuple that the worker threads reuse
    to build plain ``requests`` sessions that look like the real browser.
    """
    print("🤖 Startuji Selenium pro získání identity prohlížeče...")
    options = Options()
    for flag in ("--headless=new", "--disable-gpu"):
        options.add_argument(flag)
    browser = webdriver.Chrome(options=options)
    # The cookie domain must be established before cookies can be attached.
    browser.get("https://sktorrent.eu")
    if COOKIE_FILE.exists():
        with open(COOKIE_FILE, "r", encoding="utf-8") as fh:
            stored = json.load(fh)
        for cookie in stored:
            browser.add_cookie(cookie)
        browser.refresh()
        time.sleep(2)
    # Export the identity, then dispose of the browser.
    agent = browser.execute_script("return navigator.userAgent;")
    exported_cookies = browser.get_cookies()
    browser.quit()
    print("✅ Identita získána.")
    return agent, exported_cookies
# ============================================================
# WORKER (worker thread)
# ============================================================
def worker_task(rows_chunk, thread_id, user_agent, cookies_list):
    """Download the missing .torrent files for *rows_chunk*.

    Each row is ``(torrent_hash, download_url, title)``. Every worker builds
    its own requests.Session (from the shared browser identity) and its own
    DB connection, stores each successful download both as a BLOB in MySQL
    and as a file under ``BACKUP_DIR``, and updates the shared ``stats``
    counters.

    FIX: the ``stats`` increments are now performed under ``print_lock`` —
    they were previously unsynchronised read-modify-write operations from
    multiple threads.
    """
    # 1. Per-thread Session that impersonates the real browser
    session = requests.Session()
    session.headers.update({"User-Agent": user_agent})
    for c in cookies_list:
        session.cookies.set(c['name'], c['value'])
    # 2. Per-thread DB connection (required for thread safety)
    try:
        db = pymysql.connect(**DB_CONFIG)
        cursor = db.cursor()
    except Exception as e:
        with print_lock:
            print(f"❌ [Thread-{thread_id}] Chyba DB připojení: {e}")
        return
    for row in rows_chunk:
        t_hash, url, title = row
        # Short random pause so five threads do not hammer the server
        time.sleep(random.uniform(0.5, 2.0))
        try:
            # Download
            resp = session.get(url, timeout=15)
            if resp.status_code == 403:
                with print_lock:
                    print(f"⛔ [Thread-{thread_id}] 403 Forbidden! {title[:20]}...")
                    stats["failed"] += 1
                continue
            resp.raise_for_status()
            content = resp.content
            if len(content) > 100:  # anything smaller is an error page, not a torrent
                # A) Store in DB (BLOB)
                sql = "UPDATE torrents SET torrent_content = %s WHERE torrent_hash = %s"
                cursor.execute(sql, (content, t_hash))
                # B) Store on DISK (file)
                clean_name = sanitize_filename(title)
                # Append part of the hash so identical titles do not overwrite each other
                filename = f"{clean_name}_{t_hash[:6]}.torrent"
                file_path = os.path.join(BACKUP_DIR, filename)
                with open(file_path, "wb") as f:
                    f.write(content)
                with print_lock:
                    print(f"✅ [Thread-{thread_id}] OK: {clean_name}")
                    stats["fixed"] += 1
                    stats["saved_to_disk"] += 1
            else:
                with print_lock:
                    print(f"⚠️ [Thread-{thread_id}] Prázdný soubor: {title}")
                    stats["failed"] += 1
        except Exception as e:
            with print_lock:
                print(f"❌ [Thread-{thread_id}] Chyba: {title[:20]}... -> {e}")
                stats["failed"] += 1
    db.close()
    with print_lock:
        print(f"🏁 [Thread-{thread_id}] Dokončil práci.")
# ============================================================
# MAIN LOOP
# ============================================================
if __name__ == "__main__":
    ensure_backup_dir()
    # 1. Fetch the work list from the DB: rows that have a download URL but
    #    no stored .torrent content yet.
    print("🔍 Načítám seznam chybějících souborů z DB...")
    main_db = pymysql.connect(**DB_CONFIG)
    with main_db.cursor() as c:
        c.execute(
            "SELECT torrent_hash, download_url, title_visible FROM torrents WHERE torrent_content IS NULL AND download_url IS NOT NULL")
        all_rows = c.fetchall()
    main_db.close()
    total = len(all_rows)
    print(f"📋 K opravě: {total} položek.")
    if total == 0:
        print("🎉 Není co opravovat.")
        exit()
    # 2. Harvest the browser identity once via Selenium (shared by all workers)
    u_agent, browser_cookies = get_browser_identity()
    # 3. Split the work into THREADS roughly equal chunks (ceiling division)
    chunk_size = total // THREADS + 1
    chunks = [all_rows[i:i + chunk_size] for i in range(0, total, chunk_size)]
    print(f"🚀 Spouštím {THREADS} vláken (ukládání do DB + do složky '{BACKUP_DIR}')...")
    # 4. Fan the chunks out to the thread pool
    with ThreadPoolExecutor(max_workers=THREADS) as executor:
        futures = []
        for i, chunk in enumerate(chunks):
            if chunk:
                # Hand each worker its slice plus the shared browser identity
                futures.append(executor.submit(worker_task, chunk, i + 1, u_agent, browser_cookies))
        # Wait for completion (re-raises worker exceptions)
        for f in futures:
            f.result()
    print("\n" + "=" * 40)
    print(f"🏁 DOKONČENO")
    print(f"✅ Opraveno v DB: {stats['fixed']}")
    print(f"💾 Uloženo na disk: {stats['saved_to_disk']}")
    print(f"❌ Chyby: {stats['failed']}")
    print(f"📁 Soubory najdeš v: {os.path.abspath(BACKUP_DIR)}")
    print("=" * 40)

View File

@@ -0,0 +1,133 @@
import pymysql
import requests
import json
import time
import random
import os
import re
from pathlib import Path
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
# ============================================================
# CONFIGURATION
# ============================================================
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.50",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
COOKIE_FILE = Path("sktorrent_cookies.json")  # saved login cookies (JSON list)
BACKUP_DIR = "saved_torrents"  # local mirror of downloaded .torrent files
# ============================================================
# HELPER FUNCTIONS
# ============================================================
def sanitize_filename(name):
    """Return *name* reduced to filesystem-safe characters (max 100 chars)."""
    safe = re.sub(r'[^\w\s\.-]', '', name).strip()
    return safe[:100]
def get_browser_identity():
    """Spin up a disposable headless Chrome and return (user_agent, cookies)."""
    print("🤖 Startuji Selenium (Single Thread Mode)...")
    opts = Options()
    opts.add_argument("--headless=new")
    opts.add_argument("--disable-gpu")
    browser = webdriver.Chrome(options=opts)
    browser.get("https://sktorrent.eu")  # establish the cookie domain first
    if COOKIE_FILE.exists():
        with open(COOKIE_FILE, "r", encoding="utf-8") as handle:
            for saved_cookie in json.load(handle):
                browser.add_cookie(saved_cookie)
        browser.refresh()
        time.sleep(2)
    identity = (
        browser.execute_script("return navigator.userAgent;"),
        browser.get_cookies(),
    )
    browser.quit()
    return identity
# ============================================================
# MAIN
# ============================================================
if __name__ == "__main__":
    if not os.path.exists(BACKUP_DIR):
        os.makedirs(BACKUP_DIR)
    # 1. Load the remaining failures: rows with a URL but no stored content
    db = pymysql.connect(**DB_CONFIG)
    cursor = db.cursor()
    cursor.execute(
        "SELECT torrent_hash, download_url, title_visible FROM torrents WHERE torrent_content IS NULL AND download_url IS NOT NULL")
    rows = cursor.fetchall()
    print(f"📋 Zbývá opravit: {len(rows)} položek.")
    if not rows:
        print("🎉 Hotovo! Vše je staženo.")
        exit()
    # 2. Harvest the browser identity (user agent + cookies) via Selenium
    ua, cookies = get_browser_identity()
    session = requests.Session()
    session.headers.update({"User-Agent": ua})
    for c in cookies:
        session.cookies.set(c['name'], c['value'])
    # 3. Slow single-threaded sweep — gentler on the server than the
    #    multi-threaded variant; meant as the final cleanup pass.
    success = 0
    dead_links = 0
    print("🚀 Spouštím jemné dočištění...")
    for i, row in enumerate(rows):
        t_hash, url, title = row
        print(f"[{i + 1}/{len(rows)}] {title[:50]}...", end=" ")
        try:
            # Longer pause for stability
            time.sleep(random.uniform(1.5, 3.0))
            resp = session.get(url, timeout=20)  # longer timeout, too
            if resp.status_code == 404:
                print("❌ 404 Nenalezeno (soubor na serveru neexistuje)")
                dead_links += 1
                continue
            if resp.status_code != 200:
                print(f"❌ Chyba {resp.status_code}")
                continue
            content = resp.content
            if len(content) > 100:  # anything smaller is an error page, not a torrent
                # Store the BLOB in the DB
                cursor.execute("UPDATE torrents SET torrent_content = %s WHERE torrent_hash = %s", (content, t_hash))
                # Mirror to disk, hash-suffixed to avoid name collisions
                fname = f"{sanitize_filename(title)}_{t_hash[:6]}.torrent"
                with open(os.path.join(BACKUP_DIR, fname), "wb") as f:
                    f.write(content)
                print("✅ OK")
                success += 1
            else:
                print("⚠️ Prázdný soubor")
        except Exception as e:
            print(f"❌ Selhalo: {e}")
    db.close()
    print("\n" + "=" * 30)
    print(f"🏁 FINÁLE: Opraveno {success} z {len(rows)}")
    if dead_links > 0:
        print(f"💀 Mrtvé odkazy (404): {dead_links} (ty už opravit nejdou)")
View File

@@ -0,0 +1,158 @@
import pymysql
import bencodepy
import os
from pathlib import Path
# ============================================================
# CONFIGURATION
# ============================================================
# Your network path (Use raw string r"..." for backslashes)
# PHYSICAL_DIR = Path(r"\\tower\torrents\downloads")
PHYSICAL_DIR = Path(r"\\tower1\#Colddata\Porno")  # UNC share scanned for downloaded payloads
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.50",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
# ============================================================
# HELPER FUNCTIONS
# ============================================================
def decode_bytes(b):
    """Decode a Bencode byte string to ``str``, best effort.

    Tries UTF-8 first, then regional single-byte codecs, finally latin-1.

    FIX: latin-1 now comes LAST. latin-1 can decode any byte sequence, so in
    the original order ('utf-8', 'windows-1250', 'latin-1', 'cp1252') both
    the 'cp1252' attempt and the errors='ignore' fallback were unreachable
    dead code. The bare ``except`` is also narrowed to UnicodeDecodeError.
    """
    if isinstance(b, str):
        return b  # already text — nothing to decode
    encodings = ['utf-8', 'windows-1250', 'cp1252', 'latin-1']
    for enc in encodings:
        try:
            return b.decode(enc)
        except UnicodeDecodeError:
            continue
    # Unreachable in practice (latin-1 accepts every byte); kept as a guard.
    return b.decode('utf-8', errors='ignore')
def check_torrent_in_filesystem(torrent_blob, root_path):
    """
    Parses the binary BLOB, calculates expected paths,
    and checks if they exist in the root_path.

    Returns True when the torrent's payload appears to be present on disk,
    False on any parse or filesystem problem.
    """
    try:
        # Decode the binary BLOB
        data = bencodepy.decode(torrent_blob)
        info = data.get(b'info')
        if not info: return False
        # Get the name of the root file/folder defined in the torrent
        name = decode_bytes(info.get(b'name'))
        # Calculate expected location
        target_path = root_path / name
        # 1. Check if the main path exists
        if not target_path.exists():
            return False
        # 2. Size Verification (Basic)
        # Single-file torrent: the 'files' key is absent and 'length' holds
        # the payload size.
        if b'files' not in info:
            expected_size = info[b'length']
            real_size = target_path.stat().st_size
            # Tolerate up to 4 KB difference (filesystems can report slightly
            # different sizes).
            if abs(real_size - expected_size) < 4096:
                return True
            return False
        # If it's a multi-file torrent (folder)
        else:
            # If the folder exists, we assume it's mostly good,
            # but let's check at least one file inside to be sure it's not empty.
            files = info[b'files']
            if not files: return True  # Empty folder torrent? rare but possible.
            # Check the first file in the list ('path' is a list of byte segments)
            first_file_path = target_path.joinpath(*[decode_bytes(p) for p in files[0][b'path']])
            return first_file_path.exists()
    except Exception as e:  # broad by design: bad bencode or odd paths => "not present"
        # If Bencode fails or path is weird
        return False
# ============================================================
# MAIN EXECUTION
# ============================================================
if __name__ == "__main__":
    if not PHYSICAL_DIR.exists():
        print(f"❌ ERROR: Cannot access path: {PHYSICAL_DIR}")
        print("Make sure the drive is mapped or the network path is accessible.")
        exit()
    print(f"📂 Scanning storage: {PHYSICAL_DIR}")
    print("🚀 Connecting to Database...")
    db = pymysql.connect(**DB_CONFIG)
    cursor = db.cursor()
    # 1. Get all torrents that have content (BLOB)
    # We only select ID and Content to keep memory usage reasonable
    cursor.execute(
        "SELECT torrent_hash, title_visible, torrent_content FROM torrents WHERE torrent_content IS NOT NULL")
    rows = cursor.fetchall()
    total = len(rows)
    print(f"📋 Analysing {total} torrents from database against disk files...")
    found_count = 0
    missing_count = 0
    # 2. Iterate and Check
    updates = []  # Store successful hashes to batch update later
    for index, row in enumerate(rows):
        t_hash, title, blob = row
        is_downloaded = check_torrent_in_filesystem(blob, PHYSICAL_DIR)
        if is_downloaded:
            found_count += 1
            updates.append(t_hash)
            # Print only every 50th line to reduce clutter, or if found
            # print(f"✅ Found: {title[:50]}")
        else:
            missing_count += 1
        if index % 100 == 0:
            print(f" Processed {index}/{total} ... (Found: {found_count})")
    # 3. Batch Update Database
    print(f"\n💾 Updating Database: Marking {len(updates)} torrents as 'physical_exists = 1'...")
    # Reset everything to 0 first (in case you deleted files since last run)
    cursor.execute("UPDATE torrents SET physical_exists = 0")
    if updates:
        # Update in chunks of 1000 to be safe
        chunk_size = 1000
        for i in range(0, len(updates), chunk_size):
            chunk = updates[i:i + chunk_size]
            # Build "%s,%s,..." placeholders; values are still bound by the driver
            format_strings = ','.join(['%s'] * len(chunk))
            cursor.execute(f"UPDATE torrents SET physical_exists = 1 WHERE torrent_hash IN ({format_strings})",
                           tuple(chunk))
    db.commit()
    db.close()
    print("\n" + "=" * 40)
    print(f"🏁 SCAN COMPLETE")
    print(f"✅ Physically Available: {found_count}")
    print(f"❌ Missing / Not Downloaded: {missing_count}")
    # NOTE(review): the division below raises ZeroDivisionError when the
    # torrents table holds no content rows (total == 0) — consider guarding.
    print(f"📊 Completion Rate: {int((found_count / total) * 100)}%")
    print("=" * 40)

View File

@@ -37,8 +37,8 @@ sys.stdout.flush()
# ============================================================ # ============================================================
db = pymysql.connect( db = pymysql.connect(
host="192.168.1.76", host="192.168.1.50",
port=3307, port=3306,
user="root", user="root",
password="Vlado9674+", password="Vlado9674+",
database="torrents", database="torrents",

View File

@@ -32,7 +32,7 @@ QBT_CONFIG = {
} }
MAX_ACTIVE_DOWNLOADS = 10 MAX_ACTIVE_DOWNLOADS = 10
DEAD_TORRENT_MINUTES = 5 DEAD_TORRENT_MINUTES = 60
DEFAULT_SAVE_PATH = None DEFAULT_SAVE_PATH = None
MAIL_TO = "vladimir.buzalka@buzalka.cz" MAIL_TO = "vladimir.buzalka@buzalka.cz"

View File

@@ -0,0 +1,150 @@
import pymysql
import re
import time
import qbittorrentapi
# ============================================================
# CONFIGURATION
# ============================================================
MAX_SIZE_GB = 950  # seedbox capacity budget for one upload batch
# NOTE(review): qBittorrent and MySQL credentials are hard-coded in source —
# consider moving them to environment variables or a secrets file.
QBT_URL = "https://vladob.zen.usbx.me/qbittorrent"
QBT_USER = "vladob"
QBT_PASS = "jCni3U6d#y4bfcm"
DB_CONFIG = {
    "host": "192.168.1.50",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "torrents",
    "charset": "utf8mb4",
    "autocommit": True,
}
# ============================================================
# HELPER FUNCTIONS
# ============================================================
def parse_size_to_gb(size_str):
    """Convert a human-readable size string ('1.5 GB', '500 MB') to GB.

    Accepts comma decimal separators; returns 0.0 when the value is empty
    or no number can be found. Recognised units: TB, GB, MB, KB.
    """
    if not size_str:
        return 0.0
    normalized = str(size_str).upper().replace(",", ".").strip()
    number_match = re.search(r"([\d\.]+)", normalized)
    if not number_match:
        return 0.0
    value = float(number_match.group(1))
    if "TB" in normalized:
        return value * 1024
    if "GB" in normalized:
        return value
    if "MB" in normalized:
        return value / 1024
    if "KB" in normalized:
        return value / 1024 / 1024
    # Unrecognised unit (e.g. plain bytes) — treated as zero.
    return 0.0
# ============================================================
# HLAVNÍ LOGIKA
# ============================================================
def main():
    """Plan and submit a batch upload of torrents from the DB to the seedbox.

    Loads all not-yet-downloaded torrents (with their .torrent BLOBs) from
    MySQL ordered by seeder count, greedily selects them until MAX_SIZE_GB
    would be exceeded, asks the operator to confirm, then pushes the binary
    blobs to qBittorrent via its Web API.

    Fixes vs. previous revision: bare ``exit()`` calls replaced with
    ``return`` (``exit`` comes from the ``site`` module and is not
    guaranteed; ``return`` also keeps main() importable), the dead
    no-op size-sanity branch removed, and the DB connection is closed
    even if the query raises.
    """
    print(f"🚀 Plánuji přímý upload z DB (Limit: {MAX_SIZE_GB} GB, řazeno dle seederů)...")
    # 1. Load candidates from the DB. The query also fetches the BLOB
    # column (torrent_content), so it can take a while on a large table.
    sql = """
    SELECT torrent_hash, title_visible, size_pretty, seeders, torrent_content
    FROM torrents
    WHERE physical_exists = 0 AND torrent_content IS NOT NULL
    ORDER BY seeders DESC
    """
    db = pymysql.connect(**DB_CONFIG)
    try:
        cursor = db.cursor()
        print("⏳ Načítám data z MySQL...")
        cursor.execute(sql)
        rows = cursor.fetchall()
    finally:
        # Always release the connection, even when the query fails.
        db.close()
    print(f"🔍 Nalezeno {len(rows)} kandidátů. Vybírám ty nejlepší...")
    # 2. Greedy selection up to the capacity budget. Rows are already
    # sorted by priority (seeders DESC), so we stop at the first torrent
    # that no longer fits.
    selected_torrents = []
    total_size_gb = 0.0
    for t_hash, title, size_str, seeders, content in rows:
        size_gb = parse_size_to_gb(size_str)
        if total_size_gb + size_gb > MAX_SIZE_GB:
            print(f"🛑 Limit naplněn! '{title}' ({size_gb:.2f} GB) by přesáhl {MAX_SIZE_GB} GB.")
            break
        selected_torrents.append({
            "filename": f"{t_hash}.torrent",  # virtual file name for the API upload
            "content": content,               # raw .torrent bytes from the BLOB column
            "title": title,
            "size": size_gb,
            "seeders": seeders
        })
        total_size_gb += size_gb
    # 3. Report the planned batch before asking for confirmation.
    print("-" * 40)
    print(f"📦 Vybráno: {len(selected_torrents)} torrentů")
    print(f"💾 Celková velikost: {total_size_gb:.2f} GB / {MAX_SIZE_GB} GB")
    if selected_torrents:
        avg_seeders = sum(t['seeders'] for t in selected_torrents) / len(selected_torrents)
        print(f"⚡ Průměrně seederů: {avg_seeders:.1f}")
    print("-" * 40)
    if not selected_torrents:
        print("Nic k nahrání.")
        return
    confirm = input("❓ Nahrát tento výběr na Seedbox? (ano/ne): ")
    if confirm.lower() not in ['ano', 'y', 'yes']:
        print("❌ Zrušeno.")
        return
    # 4. Connect to the qBittorrent Web UI on the seedbox.
    try:
        qbt = qbittorrentapi.Client(
            host=QBT_URL,
            username=QBT_USER,
            password=QBT_PASS,
            VERIFY_WEBUI_CERTIFICATE=False
        )
        qbt.auth_log_in()
        print("✅ Připojeno k Seedboxu.")
    except Exception as e:
        print(f"❌ Chyba připojení: {e}")
        return
    # 5. Upload the selected blobs one by one.
    print("🚀 Odesílám...")
    success_count = 0
    for i, item in enumerate(selected_torrents):
        try:
            # The API accepts {'name.torrent': b'...'} dicts, i.e. we
            # present each BLOB as if it were an on-disk .torrent file.
            file_dict = {item['filename']: item['content']}
            qbt.torrents_add(torrent_files=file_dict, is_paused=False)
            print(f"[{i + 1}/{len(selected_torrents)}] 📤 {item['title']} ({item['size']:.1f} GB)")
            success_count += 1
            time.sleep(0.2)  # small pause to keep the Web API stable
        except Exception as e:
            print(f"❌ Chyba u {item['title']}: {e}")
    print("\n✅ HOTOVO.")
    print("Torrenty jsou na Seedboxu. Až se stáhnou, stáhni je domů a spusť skript 99_Scan...")
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import bencodepy
# ===============================
# DB CONFIG
# ===============================
# MySQL connection parameters for the `torrents` database.
# NOTE(review): credentials are hard-coded in the repository — consider
# moving them to environment variables or an untracked config file.
DB_CONFIG = dict(
    host="192.168.1.50",
    user="root",
    password="Vlado9674+",
    database="torrents",
    charset="utf8mb4"
)
LIMIT = 5  # how many torrents to display
# ===============================
# TORRENT PARSER
# ===============================
def parse_torrent(blob):
    """Return a list of (path, size) tuples for every file described by a .torrent blob."""
    info = bencodepy.decode(blob)[b'info']
    # Multi-file torrent: each entry stores its path as a list of components.
    if b'files' in info:
        return [
            ("/".join(part.decode(errors="ignore") for part in entry[b'path']),
             entry[b'length'])
            for entry in info[b'files']
        ]
    # Single-file torrent: just the name and total length.
    return [(info[b'name'].decode(errors="ignore"), info[b'length'])]
# ===============================
# MAIN
# ===============================
def main():
    """Print the DB id, title, save path, and file listing of the first LIMIT torrents."""
    conn = pymysql.connect(**DB_CONFIG)
    cur = conn.cursor()
    # LIMIT is a trusted module-level int constant, so the f-string
    # interpolation below is not an injection risk here.
    cur.execute(f"""
        SELECT id, title_visible, qb_savepath, torrent_content
        FROM torrents
        WHERE torrent_content IS NOT NULL
        LIMIT {LIMIT}
    """)
    rows = cur.fetchall()
    for tid, title, savepath, blob in rows:
        print("\n" + "="*80)
        print(f"Torrent ID : {tid}")
        print(f"Title : {title}")
        print(f"Savepath : {savepath}")
        try:
            files = parse_torrent(blob)
            print(f"Files inside torrent: {len(files)}")
            for path, size in files:
                print(f" {size:>12} B {path}")
        except Exception as e:
            # Corrupt or undecodable blob — report it and keep going.
            print("ERROR parsing torrent:", e)
    cur.close()
    conn.close()
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import pymysql
import bencodepy
from tqdm import tqdm
# =====================================
# CONFIG
# =====================================
# UNC root of the local mirror to scan for already-downloaded torrent payloads.
ULTRACC_ROOT = r"\\tower\torrents\ultracc"
# When True, report matches but do not write physical_exists back to the DB.
DRY_MODE = False
# NOTE(review): credentials are hard-coded in the repository — consider
# moving them to environment variables or an untracked config file.
DB_CONFIG = dict(
    host="192.168.1.50",
    user="root",
    password="Vlado9674+",
    database="torrents",
    charset="utf8mb4"
)
# =====================================
# TORRENT PARSER
# =====================================
def parse_torrent(blob):
    """Decode a .torrent blob.

    Returns a pair ``(files, multi)`` where ``files`` is a list of
    (relative_path, size_in_bytes) tuples and ``multi`` is True for a
    multi-file torrent, False for a single-file one.
    """
    info = bencodepy.decode(blob)[b'info']
    # Multi-file layout: paths are stored as lists of byte components.
    if b'files' in info:
        entries = [
            ("/".join(part.decode(errors="ignore") for part in item[b'path']),
             item[b'length'])
            for item in info[b'files']
        ]
        return entries, True
    # Single-file layout: one name, one length.
    return [(info[b'name'].decode(errors="ignore"), info[b'length'])], False
# =====================================
# BUILD FILESYSTEM INDEX
# =====================================
# Filesystem-index cache so repeated runs can skip the slow os.walk.
import pickle
import os  # NOTE(review): `os` is already imported at the top of the file
INDEX_FILE = r"U:\PycharmProjects\Torrents\fs_index_ultracc.pkl"
# Set True to ignore the cached pickle and rebuild the index from disk.
FORCE_REBUILD = False
def build_fs_index():
    """Build (or load a cached) index of every file under ULTRACC_ROOT.

    Returns a dict mapping ``(lowercased_filename, size_in_bytes)`` to a
    list of full paths that share that name/size fingerprint. The result
    is pickled to INDEX_FILE so subsequent runs avoid re-walking the
    (network) share unless FORCE_REBUILD is set.
    """
    # ============================
    # LOAD EXISTING INDEX
    # ============================
    if os.path.exists(INDEX_FILE) and not FORCE_REBUILD:
        print("Načítám uložený filesystem index...")
        # NOTE(review): pickle.load is only acceptable because this cache
        # is produced by this script itself — never load untrusted pickles.
        with open(INDEX_FILE, "rb") as f:
            index = pickle.load(f)
        print(f"Načten index ({len(index)} klíčů)")
        return index
    # ============================
    # BUILD NEW INDEX
    # ============================
    print("Indexuji filesystem...")
    index = {}
    pocet = 0
    for root, _, files in os.walk(ULTRACC_ROOT):
        for f in files:
            full = os.path.join(root, f)
            try:
                size = os.path.getsize(full)
            except OSError:
                # File vanished or is unreadable — skip it.
                continue
            # Key by (name, size): a cheap fingerprint that survives moves.
            key = (f.lower(), size)
            pocet += 1
            if pocet % 100 == 0:
                # Coarse progress indicator for long walks.
                print(pocet)
            index.setdefault(key, []).append(full)
    print(f"Index obsahuje {len(index)} unikátních souborů")
    # ============================
    # SAVE INDEX
    # ============================
    print("Ukládám index na disk...")
    with open(INDEX_FILE, "wb") as f:
        pickle.dump(index, f, protocol=pickle.HIGHEST_PROTOCOL)
    print("Index uložen")
    return index
# =====================================
# VALIDACE ROOTU
# =====================================
def validate_root(root, torrent_files):
    """Return True when every file listed in the torrent exists on disk under `root`.

    `torrent_files` is a list of (relative_path, size) tuples; paths use
    '/' separators and are converted to the host separator before checking.
    Sizes are not verified here — existence only.
    """
    return all(
        os.path.exists(os.path.join(root, rel_path.replace("/", os.sep)))
        for rel_path, _size in torrent_files
    )
# =====================================
# MAIN
# =====================================
def main():
    """Match DB torrents against the on-disk index and flag the found ones.

    For each torrent whose payload has not been located yet, looks up its
    largest file by (name, size) in the filesystem index, derives the
    candidate root directory, verifies that ALL of the torrent's files
    exist under it, and — unless DRY_MODE — sets physical_exists = 1.
    """
    fs_index = build_fs_index()
    conn = pymysql.connect(**DB_CONFIG)
    cur = conn.cursor()
    cur.execute("""
        SELECT id, torrent_content
        FROM torrents
        WHERE torrent_content IS NOT NULL and physical_exists=FALSE
    """)
    rows = cur.fetchall()
    print(f"Torrentů ke kontrole: {len(rows)}")
    success = 0
    for tid, blob in tqdm(rows):
        try:
            torrent_files, multi = parse_torrent(blob)
            # Use the torrent's largest file as the index lookup key —
            # it is the least likely to collide by (name, size).
            rel_path, size = max(torrent_files, key=lambda x: x[1])
            fname = os.path.basename(rel_path).lower()
            key = (fname, size)
            if key not in fs_index:
                continue
            found = False
            for full_path in fs_index[key]:
                if multi:
                    # Root = full path with the relative path sliced off the end.
                    # NOTE(review): this assumes the suffix lengths match even
                    # though rel_path uses '/' and full_path uses os.sep —
                    # both are single-char separators, but confirm on each OS.
                    root = full_path[:-len(rel_path)]
                    root = root.rstrip("\\/")
                else:
                    # Single-file torrent: the containing directory is the root.
                    root = os.path.dirname(full_path)
                if validate_root(root, torrent_files):
                    found = True
                    success += 1
                    print(f"[FOUND] Torrent {tid}{root}")
                    if not DRY_MODE:
                        cur.execute("""
                            UPDATE torrents
                            SET physical_exists = 1
                            WHERE id = %s
                        """, (tid,))
                    break
            if not found:
                pass
        except Exception as e:
            # Undecodable blob or filesystem error — log and continue.
            print(f"ERROR torrent {tid}: {e}")
    if not DRY_MODE:
        conn.commit()
    print(f"Celkem nalezeno: {success}")
    cur.close()
    conn.close()
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,64 @@
import pymysql
import pymysql.cursors
def get_unfinished_torrents():
    """Print every tracked torrent that qBittorrent has not finished downloading.

    Queries the `torrents` table for rows added to qBittorrent
    (qb_added = 1) that are below 100 % progress and not in a
    seeding/completed state, then prints a short status block per torrent.
    """
    # Connection settings — note the non-default host/port (192.168.1.76:3307),
    # unlike the other scripts in this repo which target 192.168.1.50:3306.
    connection_config = {
        'host': '192.168.1.76',
        'user': 'root',
        'password': 'Vlado9674+',
        'database': 'torrents',
        'port': 3307,
        'cursorclass': pymysql.cursors.DictCursor  # rows are returned as dicts
    }
    try:
        # Open the connection
        connection = pymysql.connect(**connection_config)
        with connection.cursor() as cursor:
            # SQL query
            sql = """
            SELECT
                title_visible,
                qb_progress,
                qb_state,
                size_pretty,
                added_datetime
            FROM torrents
            WHERE qb_added = 1
            AND qb_progress < 1
            AND qb_state NOT IN ('seeding', 'uploading', 'stalledUP', 'pausedUP', 'completed')
            ORDER BY qb_progress DESC;
            """
            cursor.execute(sql)
            results = cursor.fetchall()
            print(f"\n--- NEDOKONČENÉ TORRENTY (Port {connection_config['port']}) ---")
            if not results:
                print("Vše je hotovo nebo nic neběží.")
            else:
                for row in results:
                    # qb_progress is assumed to be a float in 0.0–1.0 — TODO confirm schema
                    progress_pct = row['qb_progress'] * 100
                    print(f"Torrent: {row['title_visible']}")
                    print(f"Stav: {row['qb_state']}")
                    print(f"Pokrok: {progress_pct:.2f}%")
                    print(f"Velikost: {row['size_pretty']}")
                    print("-" * 40)
    except pymysql.MySQLError as e:
        print(f"Chyba při komunikaci s DB: {e}")
    finally:
        # `connection` may be unbound if connect() itself raised, hence the locals() guard.
        if 'connection' in locals():
            connection.close()
        print("Spojení s databází bylo uzavřeno.")
if __name__ == "__main__":
    get_unfinished_torrents()

BIN
library_paths.db Normal file

Binary file not shown.

BIN
paths.pkl Normal file

Binary file not shown.