Compare commits

..

6 Commits

Author SHA1 Message Date
administrator 8782ec1bde notebook vb 2026-04-03 08:55:10 +02:00
administrator 240bc0d83f MedicusWithClaude: detailní poznámky k test_import_FINAL.py
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 08:53:59 +02:00
administrator 05b98a5cec MedicusWithClaude: přejmenování test_import_single → test_import_FINAL
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 08:43:14 +02:00
administrator e04bf6172a Backup/BackupAll: přidány verze 01 a 02
01 = původní (každá DB vlastní ZIP)
02 = nový (hlavní DB vlastní ZIP, všechny externí DB → jeden ZIP)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 08:29:15 +02:00
administrator 91edb6f084 Merge: BackupExterniDB01 + BackupExterniDB02 do master
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 08:22:03 +02:00
administrator 3eaf8709cf Backup/BackupExterniDB: přidány dva skripty pro zálohu ext. DB
01 = původní (každý FBK → vlastní ZIP)
02 = nový (všechny FBK → jeden ZIP)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 08:21:50 +02:00
6 changed files with 937 additions and 0 deletions
+185
View File
@@ -0,0 +1,185 @@
import subprocess
import os
from pathlib import Path
from datetime import datetime
import zipfile
import time
import traceback
from EmailMessagingGraph import send_mail
# ============================================================
# CONFIG
# ============================================================
# Path to the Firebird 2.5 gbak backup utility.
GBAK = r"C:\Program Files\Firebird\Firebird_2_5_CGM\bin\gbak.exe"
FB_USER = "SYSDBA"
# NOTE(review): hardcoded DB password in source — consider moving
# credentials to an environment variable or a secrets store.
FB_PASS = "masterkey"
FB_PORT = "3050"
# Connection string of the main Medicus database.
MAIN_DB = r"localhost/3050:C:\medicus 3\data\MEDICUS.FDB"
# Directory containing the external MEDICUS_FILES_*.fdb databases.
EXT_DIR = Path(r"U:\externi")
# Output directory for the FBK/ZIP/log files.
BACKUP_DIR = Path(r"U:\medicusbackup")
# Recipient of the summary report e-mail.
MAIL_TO = "vladimir.buzalka@buzalka.cz"
CHUNK = 8 * 1024 * 1024  # 8 MB read chunk for streaming compression
# ============================================================
# HELPERS
# ============================================================
def gbak_and_zip(label: str, db_conn: str, fbk: Path, zipf: Path, log: Path) -> dict:
    """Back up *db_conn* with gbak into *fbk*, then compress it into *zipf*.

    On success the intermediate FBK file and the gbak log are deleted and
    only the ZIP remains. Any failure propagates as an exception; cleanup
    of partial files is then the caller's responsibility.
    """
    result = dict(
        label=label,
        ok=False,
        fbk_size=0,
        zip_size=0,
        t_gbak=0,
        t_zip=0,
        error=None,
    )
    # --- gbak backup, verbose output captured into the log file ---
    print(f"GBAK: {label} ... ", end="", flush=True)
    gbak_started = time.time()
    command = [GBAK, "-b", "-user", FB_USER, "-pas", FB_PASS, db_conn, str(fbk), "-v"]
    with open(log, "w", encoding="utf-8") as log_file:
        subprocess.run(command, stdout=log_file, stderr=subprocess.STDOUT, check=True)
    result["t_gbak"] = time.time() - gbak_started
    result["fbk_size"] = fbk.stat().st_size
    print(f"OK ({result['t_gbak']:.0f}s, {result['fbk_size']/1024/1024:.1f} MB)")
    # --- stream the FBK into the ZIP chunk by chunk (bounded memory) ---
    zip_started = time.time()
    done = 0
    fbk_size = result["fbk_size"]
    with zipfile.ZipFile(zipf, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zf:
        entry = zipfile.ZipInfo(fbk.name)
        entry.compress_type = zipfile.ZIP_DEFLATED
        # force_zip64: backups may exceed the 4 GB ZIP32 limit
        with zf.open(entry, "w", force_zip64=True) as sink, open(fbk, "rb") as src:
            while chunk := src.read(CHUNK):
                sink.write(chunk)
                done += len(chunk)
                print(f"\r ZIP {label}: {done * 100 / fbk_size:6.2f}%", end="", flush=True)
    print()
    result["t_zip"] = time.time() - zip_started
    result["zip_size"] = zipf.stat().st_size
    # --- drop intermediates; only the ZIP is kept ---
    fbk.unlink()
    log.unlink()
    result["ok"] = True
    return result
def format_result(r: dict) -> str:
    """Render one backup result as a single indented report line."""
    fbk_mb = r["fbk_size"] / 1024 / 1024
    zip_mb = r["zip_size"] / 1024 / 1024
    if r["fbk_size"]:
        compression = 100 * (1 - r["zip_size"] / r["fbk_size"])
    else:
        compression = 0
    return (
        f" {r['label']}: "
        f"FBK {fbk_mb:.1f} MB → "
        f"ZIP {zip_mb:.1f} MB "
        f"({compression:.0f}% komprese, "
        f"gbak {r['t_gbak']:.0f}s, zip {r['t_zip']:.0f}s)"
    )
# ============================================================
# MAIN
# ============================================================
def main():
    """Back up the main Medicus DB and each external DB into its own ZIP,
    then e-mail a summary report.

    Raises:
        RuntimeError: if at least one backup failed (after the report was sent).
    """
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)
    now = datetime.now()
    ts = now.strftime("%Y-%m-%d_%H-%M-%S")
    backed_up = []
    errors = []
    # ----------------------------------------------------------
    # 1) Main database MEDICUS.FDB
    # ----------------------------------------------------------
    fbk = BACKUP_DIR / f"MEDICUS_{ts}.fbk"
    zipf = BACKUP_DIR / f"MEDICUS_{ts}.zip"
    log = BACKUP_DIR / f"MEDICUS_{ts}.log"
    try:
        r = gbak_and_zip("MEDICUS", MAIN_DB, fbk, zipf, log)
        backed_up.append(r)
    except Exception:
        errors.append({"label": "MEDICUS", "error": traceback.format_exc()})
        # FIX: also remove a half-written ZIP, not only the FBK and log —
        # gbak_and_zip may fail mid-compression and leave a partial archive.
        for f in (fbk, zipf, log):
            if f.exists():
                f.unlink()
    # ----------------------------------------------------------
    # 2) External databases MEDICUS_FILES_*.fdb
    # ----------------------------------------------------------
    # Case-insensitive match (.fdb / .FDB), stable order by lowercase name.
    fdb_all = sorted(
        set(EXT_DIR.glob("MEDICUS_FILES_*.fdb")) | set(EXT_DIR.glob("MEDICUS_FILES_*.FDB")),
        key=lambda p: p.name.lower(),
    )
    for fdb in fdb_all:
        name = fdb.stem
        fbk = BACKUP_DIR / f"{name}_{ts}.fbk"
        zipf = BACKUP_DIR / f"{name}_{ts}.zip"
        log = BACKUP_DIR / f"{name}_{ts}.log"
        db_conn = f"localhost/{FB_PORT}:{fdb}"
        try:
            r = gbak_and_zip(name, db_conn, fbk, zipf, log)
            backed_up.append(r)
        except Exception:
            errors.append({"label": name, "error": traceback.format_exc()})
            # FIX: include the partial ZIP in cleanup (see above).
            for f in (fbk, zipf, log):
                if f.exists():
                    f.unlink()
    # ----------------------------------------------------------
    # Report
    # ----------------------------------------------------------
    total = 1 + len(fdb_all)
    report = [
        f"Backup Medicus {now.strftime('%d.%m.%Y %H:%M')}",
        f"Celkem DB: {total} | OK: {len(backed_up)} | Chyby: {len(errors)}",
        f"Výstupní adresář: {BACKUP_DIR}",
        "",
    ]
    if backed_up:
        report.append("--- Zálohováno ---")
        total_zip = sum(r["zip_size"] for r in backed_up)
        for r in backed_up:
            report.append(format_result(r))
        report.append(f" Celková velikost ZIP: {total_zip/1024/1024:.1f} MB")
        report.append("")
    if errors:
        report.append("--- CHYBY ---")
        for e in errors:
            report.append(f" {e['label']}:\n{e['error']}")
        report.append("")
    has_errors = bool(errors)
    subject = (
        f"{'X' if has_errors else 'OK'} MEDICUS backup "
        f"{len(backed_up)}/{total}"
        + (f" {len(errors)} chyb" if has_errors else "")
    )
    send_mail(MAIL_TO, subject, "\n".join(report))
    print("\n" + "\n".join(report))
    if errors:
        # Non-zero exit for the scheduler after the report has been sent.
        raise RuntimeError(f"{len(errors)} backup(s) failed")
if __name__ == "__main__":
    # Script entry point — run the backup only when executed directly.
    main()
+235
View File
@@ -0,0 +1,235 @@
import subprocess
import os
from pathlib import Path
from datetime import datetime
import zipfile
import time
import traceback
from EmailMessagingGraph import send_mail
# ============================================================
# CONFIG
# ============================================================
# Path to the Firebird 2.5 gbak backup utility.
GBAK = r"C:\Program Files\Firebird\Firebird_2_5_CGM\bin\gbak.exe"
FB_USER = "SYSDBA"
# NOTE(review): hardcoded DB password in source — consider moving
# credentials to an environment variable or a secrets store.
FB_PASS = "masterkey"
FB_PORT = "3050"
# Connection string of the main Medicus database.
MAIN_DB = r"localhost/3050:C:\medicus 3\data\MEDICUS.FDB"
# Directory containing the external MEDICUS_FILES_*.fdb databases.
EXT_DIR = Path(r"c:\medicusext")
# Output directory for the FBK/ZIP/log files.
BACKUP_DIR = Path(r"U:\medicusbackup")
# Recipient of the summary report e-mail.
MAIL_TO = "vladimir.buzalka@buzalka.cz"
CHUNK = 8 * 1024 * 1024  # 8 MB read chunk for streaming compression
# ============================================================
# HELPERS
# ============================================================
def run_gbak(label: str, db_conn: str, fbk: Path, log: Path) -> dict:
    """Back up *db_conn* into *fbk* via gbak and return a result dict.

    The zip-related fields stay zeroed — compression happens later.
    A non-zero gbak exit raises subprocess.CalledProcessError.
    """
    result = dict(
        label=label,
        ok=False,
        fbk=fbk,
        fbk_size=0,
        zip_size=0,
        t_gbak=0,
        t_zip=0,
        error=None,
    )
    print(f"GBAK: {label} ... ", end="", flush=True)
    started = time.time()
    command = [GBAK, "-b", "-user", FB_USER, "-pas", FB_PASS, db_conn, str(fbk), "-v"]
    # Verbose gbak output goes into the log file.
    with open(log, "w", encoding="utf-8") as log_file:
        subprocess.run(command, stdout=log_file, stderr=subprocess.STDOUT, check=True)
    elapsed = time.time() - started
    size = fbk.stat().st_size
    result["t_gbak"] = elapsed
    result["fbk_size"] = size
    print(f"OK ({elapsed:.0f}s, {size/1024/1024:.1f} MB)")
    result["ok"] = True
    return result
def zip_single(label: str, fbk: Path, zipf: Path) -> tuple[int, float]:
    """Compress one FBK file into its own ZIP archive.

    Streams the source in CHUNK-sized pieces (bounded memory) and prints
    a single-line progress percentage. Returns (zip_size, t_zip).
    """
    started = time.time()
    written = 0
    source_size = fbk.stat().st_size
    with zipfile.ZipFile(zipf, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zf:
        entry = zipfile.ZipInfo(fbk.name)
        entry.compress_type = zipfile.ZIP_DEFLATED
        # force_zip64: archives may exceed the 4 GB ZIP32 limit
        with zf.open(entry, "w", force_zip64=True) as sink, open(fbk, "rb") as src:
            while chunk := src.read(CHUNK):
                sink.write(chunk)
                written += len(chunk)
                print(f"\r ZIP {label}: {written * 100 / source_size:6.2f}%", end="", flush=True)
    print()
    return zipf.stat().st_size, time.time() - started
def zip_multiple(fbk_results: list[dict], zipf: Path) -> tuple[int, float]:
    """Compress every FBK from *fbk_results* into the single archive *zipf*.

    Progress is reported against the combined size of all FBK files.
    Returns (zip_size, t_zip).
    """
    started = time.time()
    grand_total = sum(item["fbk_size"] for item in fbk_results)
    written = 0
    with zipfile.ZipFile(zipf, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zf:
        for item in fbk_results:
            fbk = item["fbk"]
            entry = zipfile.ZipInfo(fbk.name)
            entry.compress_type = zipfile.ZIP_DEFLATED
            # force_zip64: the combined archive may exceed the 4 GB ZIP32 limit
            with zf.open(entry, "w", force_zip64=True) as sink, open(fbk, "rb") as src:
                while chunk := src.read(CHUNK):
                    sink.write(chunk)
                    written += len(chunk)
                    print(f"\r ZIP {fbk.name}: {written * 100 / grand_total:6.2f}%", end="", flush=True)
    print()
    return zipf.stat().st_size, time.time() - started
def format_result(r: dict) -> str:
    """Format a single backup result for the e-mail/console report."""
    fbk, zipped = r["fbk_size"], r["zip_size"]
    ratio = 100 * (1 - zipped / fbk) if fbk else 0
    parts = [
        f" {r['label']}: ",
        f"FBK {fbk/1024/1024:.1f} MB → ",
        f"ZIP {zipped/1024/1024:.1f} MB ",
        f"({ratio:.0f}% komprese, ",
        f"gbak {r['t_gbak']:.0f}s, zip {r['t_zip']:.0f}s)",
    ]
    return "".join(parts)
# ============================================================
# MAIN
# ============================================================
def main():
    """Back up the main DB into its own ZIP and all external DBs into one
    shared ZIP, then e-mail a summary report.

    Raises:
        RuntimeError: if at least one backup failed (after the report was sent).
    """
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)
    now = datetime.now()
    ts = now.strftime("%Y-%m-%d_%H-%M-%S")
    backed_up = []
    errors = []
    # ----------------------------------------------------------
    # 1) Main database MEDICUS.FDB -> its own ZIP
    # ----------------------------------------------------------
    fbk = BACKUP_DIR / f"MEDICUS_{ts}.fbk"
    zipf = BACKUP_DIR / f"MEDICUS_{ts}.zip"
    log = BACKUP_DIR / f"MEDICUS_{ts}.log"
    try:
        r = run_gbak("MEDICUS", MAIN_DB, fbk, log)
        log.unlink()
        zip_size, t_zip = zip_single("MEDICUS", fbk, zipf)
        fbk.unlink()
        r["zip_size"] = zip_size
        r["t_zip"] = t_zip
        backed_up.append(r)
    except Exception:
        errors.append({"label": "MEDICUS", "fbk_size": 0, "zip_size": 0, "t_gbak": 0, "t_zip": 0, "error": traceback.format_exc()})
        # FIX: also remove a half-written ZIP — zip_single may fail
        # mid-compression and leave a partial archive behind.
        for f in (fbk, zipf, log):
            if f.exists():
                f.unlink()
    # ----------------------------------------------------------
    # 2) External databases MEDICUS_FILES_*.fdb -> one shared ZIP
    # ----------------------------------------------------------
    # Case-insensitive match (.fdb / .FDB), stable order by lowercase name.
    fdb_all = sorted(
        set(EXT_DIR.glob("MEDICUS_FILES_*.fdb")) | set(EXT_DIR.glob("MEDICUS_FILES_*.FDB")),
        key=lambda p: p.name.lower(),
    )
    ext_results = []
    for fdb in fdb_all:
        name = fdb.stem
        fbk = BACKUP_DIR / f"{name}_{ts}.fbk"
        log = BACKUP_DIR / f"{name}_{ts}.log"
        db_conn = f"localhost/{FB_PORT}:{fdb}"
        try:
            r = run_gbak(name, db_conn, fbk, log)
            log.unlink()
            ext_results.append(r)
        except Exception:
            errors.append({"label": name, "fbk_size": 0, "zip_size": 0, "t_gbak": 0, "t_zip": 0, "error": traceback.format_exc()})
            for f in (fbk, log):
                if f.exists():
                    f.unlink()
    # ZIP all external FBK files into one shared archive
    if ext_results:
        ext_zip = BACKUP_DIR / f"MEDICUS_FILES_{ts}.zip"
        print(f"\nZIP externích DB → {ext_zip.name}")
        try:
            zip_size, t_zip = zip_multiple(ext_results, ext_zip)
            for r in ext_results:
                r["zip_size"] = zip_size  # shared size of the resulting ZIP
                r["t_zip"] = t_zip
                r["fbk"].unlink()
                backed_up.append(r)
        except Exception:
            errors.append({"label": "MEDICUS_FILES (zip)", "fbk_size": 0, "zip_size": 0, "t_gbak": 0, "t_zip": 0, "error": traceback.format_exc()})
            # FIX: remove the partial shared ZIP as well as the leftover FBKs.
            if ext_zip.exists():
                ext_zip.unlink()
            for r in ext_results:
                if r["fbk"].exists():
                    r["fbk"].unlink()
    # ----------------------------------------------------------
    # Report
    # ----------------------------------------------------------
    total = 1 + len(fdb_all)
    report = [
        f"Backup Medicus {now.strftime('%d.%m.%Y %H:%M')}",
        f"Celkem DB: {total} | OK: {len(backed_up)} | Chyby: {len(errors)}",
        f"Výstupní adresář: {BACKUP_DIR}",
        "",
    ]
    if backed_up:
        report.append("--- Zálohováno ---")
        # Main DB first, then the external DBs sharing one ZIP.
        main_results = [r for r in backed_up if r["label"] == "MEDICUS"]
        ext_backed = [r for r in backed_up if r["label"] != "MEDICUS"]
        for r in main_results:
            report.append(format_result(r))
        if ext_backed:
            total_ext_fbk = sum(r["fbk_size"] for r in ext_backed)
            ext_zip_size = ext_backed[0]["zip_size"] if ext_backed else 0
            ratio = 100 * (1 - ext_zip_size / total_ext_fbk) if total_ext_fbk else 0
            report.append(f" Externí DB ({len(ext_backed)} souborů):")
            for r in ext_backed:
                report.append(f" {r['label']}: FBK {r['fbk_size']/1024/1024:.1f} MB (gbak {r['t_gbak']:.0f}s)")
            report.append(
                f" → společný ZIP: {ext_zip_size/1024/1024:.1f} MB "
                f"({ratio:.0f}% komprese, zip {ext_backed[0]['t_zip']:.0f}s)"
            )
        # Count the shared external ZIP once, not once per external DB.
        total_zip = sum(r["zip_size"] for r in main_results) + (ext_backed[0]["zip_size"] if ext_backed else 0)
        report.append(f" Celková velikost ZIP: {total_zip/1024/1024:.1f} MB")
        report.append("")
    if errors:
        report.append("--- CHYBY ---")
        for e in errors:
            report.append(f" {e['label']}:\n{e['error']}")
        report.append("")
    has_errors = bool(errors)
    subject = (
        f"{'X' if has_errors else 'OK'} MEDICUS backup "
        f"{len(backed_up)}/{total}"
        + (f" {len(errors)} chyb" if has_errors else "")
    )
    send_mail(MAIL_TO, subject, "\n".join(report))
    print("\n" + "\n".join(report))
    if errors:
        # Non-zero exit for the scheduler after the report has been sent.
        raise RuntimeError(f"{len(errors)} backup(s) failed")
if __name__ == "__main__":
    # Script entry point — run the backup only when executed directly.
    main()
+172
View File
@@ -0,0 +1,172 @@
import subprocess
import json
import os
from pathlib import Path
from datetime import datetime
import zipfile
import time
import traceback
from EmailMessagingGraph import send_mail
# ============================================================
# CONFIG
# ============================================================
# Path to the Firebird 2.5 gbak backup utility.
GBAK = r"C:\Program Files\Firebird\Firebird_2_5_CGM\bin\gbak.exe"
FB_USER = "SYSDBA"
# NOTE(review): hardcoded DB password in source — consider moving
# credentials to an environment variable or a secrets store.
FB_PASS = "masterkey"
FB_PORT = "3050"
# Directory containing the external MEDICUS_FILES_*.fdb databases.
SRC_DIR = Path(r"c:\medicusext")
# Output directory for the FBK/ZIP/log files.
BACKUP_DIR = Path(r"U:\medicusbackup")
# Recipient of the summary report e-mail.
MAIL_TO = "vladimir.buzalka@buzalka.cz"
CHUNK = 8 * 1024 * 1024  # 8 MB read chunk for streaming compression
# ============================================================
# MAIN
# ============================================================
def main():
    """Back up each external MEDICUS_FILES_*.fdb into its own ZIP archive,
    delete the intermediate FBK/log files, and e-mail a summary report.

    Raises:
        RuntimeError: if at least one backup failed (after the report was sent).
    """
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)
    now = datetime.now()
    ts = now.strftime("%Y-%m-%d_%H-%M-%S")
    # Find all FDB files (case-insensitive)
    fdb_files = sorted(SRC_DIR.glob("MEDICUS_FILES_*.fdb"))
    fdb_upper = sorted(SRC_DIR.glob("MEDICUS_FILES_*.FDB"))
    fdb_all = sorted(
        set(fdb_files + fdb_upper),
        key=lambda p: p.name.lower(),
    )
    backed_up = []
    errors = []
    for fdb in fdb_all:
        name = fdb.stem
        fbk = BACKUP_DIR / f"{name}_{ts}.fbk"
        zipf = BACKUP_DIR / f"{name}_{ts}.zip"
        log = BACKUP_DIR / f"{name}_{ts}.log"
        result = {
            "file": fdb.name,
            "ok": False,
            "fbk_size": 0,
            "zip_size": 0,
            "t_gbak": 0,
            "t_zip": 0,
            "error": None,
        }
        try:
            # 1) GBAK (verbose output captured into the log file)
            print(f"GBAK: {fdb.name} ... ", end="", flush=True)
            t0 = time.time()
            db_conn = f"localhost/{FB_PORT}:{fdb}"
            cmd = [
                GBAK, "-b",
                "-user", FB_USER,
                "-pas", FB_PASS,
                db_conn, str(fbk),
                "-v",
            ]
            with open(log, "w", encoding="utf-8") as f:
                subprocess.run(
                    cmd, stdout=f, stderr=subprocess.STDOUT, check=True,
                )
            result["t_gbak"] = time.time() - t0
            result["fbk_size"] = fbk.stat().st_size
            print(f"OK ({result['t_gbak']:.0f}s)")
            # 2) ZIP, streamed in CHUNK-sized pieces to bound memory use
            t1 = time.time()
            processed = 0
            fbk_size = result["fbk_size"]
            with zipfile.ZipFile(
                zipf, "w",
                compression=zipfile.ZIP_DEFLATED,
                compresslevel=9,
            ) as zf:
                zi = zipfile.ZipInfo(fbk.name)
                zi.compress_type = zipfile.ZIP_DEFLATED
                # force_zip64: backups may exceed the 4 GB ZIP32 limit
                with zf.open(zi, "w", force_zip64=True) as z:
                    with open(fbk, "rb") as src:
                        while buf := src.read(CHUNK):
                            z.write(buf)
                            processed += len(buf)
                            pct = processed * 100 / fbk_size
                            print(
                                f"\r ZIP {name}: {pct:6.2f}%",
                                end="", flush=True,
                            )
            print()
            result["t_zip"] = time.time() - t1
            result["zip_size"] = zipf.stat().st_size
            # 3) DELETE FBK + LOG (only the ZIP is kept)
            fbk.unlink()
            log.unlink()
            result["ok"] = True
            backed_up.append(result)
        except Exception:
            result["error"] = traceback.format_exc()
            errors.append(result)
            # FIX: also remove a half-written ZIP, not only the FBK and log.
            for f in (fbk, zipf, log):
                if f.exists():
                    f.unlink()
    # Build report
    report = []
    report.append(f"Backup externi DB - {now.strftime('%d.%m.%Y %H:%M')}")
    report.append(f"Celkem souboru: {len(fdb_all)}")
    report.append(f"Zalohovano: {len(backed_up)}")
    report.append(f"Chyby: {len(errors)}")
    report.append("")
    if backed_up:
        report.append("--- Backed up ---")
        total_zip = 0
        for r in backed_up:
            total_zip += r["zip_size"]
            report.append(
                f" {r['file']}: "
                f"FBK {r['fbk_size']/1024/1024:.1f} MB -> "
                f"ZIP {r['zip_size']/1024/1024:.1f} MB "
                f"(gbak {r['t_gbak']:.0f}s, zip {r['t_zip']:.0f}s)"
            )
        report.append(f" Total ZIP: {total_zip / 1024 / 1024:.1f} MB")
        report.append("")
    if errors:
        report.append("--- ERRORS ---")
        for r in errors:
            report.append(f" {r['file']}: {r['error']}")
        report.append("")
    # Send email
    has_errors = len(errors) > 0
    subject = (
        f"{'X' if has_errors else 'OK'} "
        f"MEDICUS externi DB - "
        f"backup {len(backed_up)}/{len(fdb_all)}"
        f"{f', {len(errors)} errors' if has_errors else ''}"
    )
    send_mail(MAIL_TO, subject, "\n".join(report))
    print("\n" + "\n".join(report))
    if errors:
        # Non-zero exit for the scheduler after the report has been sent.
        raise RuntimeError(f"{len(errors)} backup(s) failed")
if __name__ == "__main__":
    # Script entry point — run the backup only when executed directly.
    main()
+189
View File
@@ -0,0 +1,189 @@
import subprocess
import json
import os
from pathlib import Path
from datetime import datetime
import zipfile
import time
import traceback
from EmailMessagingGraph import send_mail
# ============================================================
# CONFIG
# ============================================================
# Path to the Firebird 2.5 gbak backup utility.
GBAK = r"C:\Program Files\Firebird\Firebird_2_5_CGM\bin\gbak.exe"
FB_USER = "SYSDBA"
# NOTE(review): hardcoded DB password in source — consider moving
# credentials to an environment variable or a secrets store.
FB_PASS = "masterkey"
FB_PORT = "3050"
# Directory containing the external MEDICUS_FILES_*.fdb databases.
SRC_DIR = Path(r"c:\medicusext")
# Output directory for the FBK/ZIP/log files.
BACKUP_DIR = Path(r"U:\medicusbackup")
# Recipient of the summary report e-mail.
MAIL_TO = "vladimir.buzalka@buzalka.cz"
CHUNK = 8 * 1024 * 1024  # 8 MB read chunk for streaming compression
# ============================================================
# MAIN
# ============================================================
def main():
    """Back up every external MEDICUS_FILES_*.fdb via gbak, pack all FBK
    files into one shared ZIP archive, and e-mail a summary report.

    Raises:
        RuntimeError: if at least one backup step failed (after the report
        was sent).
    """
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)
    now = datetime.now()
    ts = now.strftime("%Y-%m-%d_%H-%M-%S")
    # Find all FDB files (case-insensitive)
    fdb_files = sorted(SRC_DIR.glob("MEDICUS_FILES_*.fdb"))
    fdb_upper = sorted(SRC_DIR.glob("MEDICUS_FILES_*.FDB"))
    fdb_all = sorted(
        set(fdb_files + fdb_upper),
        key=lambda p: p.name.lower(),
    )
    backed_up = []
    errors = []
    fbk_paths = []  # (fbk, result) pairs to be zipped together
    # --------------------------------------------------------
    # 1) GBAK all databases
    # --------------------------------------------------------
    for fdb in fdb_all:
        name = fdb.stem
        fbk = BACKUP_DIR / f"{name}_{ts}.fbk"
        log = BACKUP_DIR / f"{name}_{ts}.log"
        result = {
            "file": fdb.name,
            "ok": False,
            "fbk_size": 0,
            "zip_size": 0,
            "t_gbak": 0,
            "t_zip": 0,
            "error": None,
        }
        try:
            print(f"GBAK: {fdb.name} ... ", end="", flush=True)
            t0 = time.time()
            db_conn = f"localhost/{FB_PORT}:{fdb}"
            cmd = [
                GBAK, "-b",
                "-user", FB_USER,
                "-pas", FB_PASS,
                db_conn, str(fbk),
                "-v",
            ]
            with open(log, "w", encoding="utf-8") as f:
                subprocess.run(
                    cmd, stdout=f, stderr=subprocess.STDOUT, check=True,
                )
            result["t_gbak"] = time.time() - t0
            result["fbk_size"] = fbk.stat().st_size
            print(f"OK ({result['t_gbak']:.0f}s)")
            # Delete log, keep FBK for zipping
            log.unlink()
            result["ok"] = True
            fbk_paths.append((fbk, result))
            backed_up.append(result)
        except Exception:
            result["error"] = traceback.format_exc()
            errors.append(result)
            for f in (fbk, log):
                if f.exists():
                    f.unlink()
    # --------------------------------------------------------
    # 2) ZIP all FBK files into one archive
    # --------------------------------------------------------
    total_zip_size = 0
    if fbk_paths:
        zip_path = BACKUP_DIR / f"MEDICUS_FILES_{ts}.zip"
        print(f"\nZIP: {zip_path.name}")
        t_zip_start = time.time()
        # Calculate total size for progress
        total_fbk_size = sum(fbk.stat().st_size for fbk, _ in fbk_paths)
        total_processed = 0
        # FIX: wrap the whole ZIP phase in try/except so a compression
        # failure is recorded and reported by e-mail instead of crashing
        # the script before the report is sent.
        try:
            with zipfile.ZipFile(
                zip_path, "w",
                compression=zipfile.ZIP_DEFLATED,
                compresslevel=9,
            ) as zf:
                for fbk, result in fbk_paths:
                    zi = zipfile.ZipInfo(fbk.name)
                    zi.compress_type = zipfile.ZIP_DEFLATED
                    # force_zip64: the combined archive may exceed 4 GB
                    with zf.open(zi, "w", force_zip64=True) as z:
                        with open(fbk, "rb") as src:
                            while buf := src.read(CHUNK):
                                z.write(buf)
                                total_processed += len(buf)
                                pct = total_processed * 100 / total_fbk_size
                                print(
                                    f"\r {fbk.name}: {pct:6.2f}%",
                                    end="", flush=True,
                                )
            print()
            t_zip_total = time.time() - t_zip_start
            total_zip_size = zip_path.stat().st_size
            print(f"ZIP OK ({t_zip_total:.0f}s, {total_zip_size/1024/1024:.1f} MB)")
            # Fill zip_size into each result and delete FBK files
            for fbk, result in fbk_paths:
                result["zip_size"] = total_zip_size
                fbk.unlink()
        except Exception:
            errors.append({
                "file": zip_path.name,
                "ok": False,
                "fbk_size": 0,
                "zip_size": 0,
                "t_gbak": 0,
                "t_zip": 0,
                "error": traceback.format_exc(),
            })
            # Remove the partial shared ZIP and the leftover FBK files.
            if zip_path.exists():
                zip_path.unlink()
            for fbk, _ in fbk_paths:
                if fbk.exists():
                    fbk.unlink()
    # --------------------------------------------------------
    # Build report
    # --------------------------------------------------------
    report = []
    report.append(f"Backup externi DB - {now.strftime('%d.%m.%Y %H:%M')}")
    report.append(f"Celkem souboru: {len(fdb_all)}")
    report.append(f"Zalohovano: {len(backed_up)}")
    report.append(f"Chyby: {len(errors)}")
    report.append("")
    if backed_up:
        report.append("--- Backed up ---")
        total_fbk_mb = sum(r["fbk_size"] for r in backed_up) / 1024 / 1024
        for r in backed_up:
            report.append(
                f" {r['file']}: "
                f"FBK {r['fbk_size']/1024/1024:.1f} MB "
                f"(gbak {r['t_gbak']:.0f}s)"
            )
        report.append(f" Total FBK: {total_fbk_mb:.1f} MB -> ZIP: {total_zip_size/1024/1024:.1f} MB")
        report.append("")
    if errors:
        report.append("--- ERRORS ---")
        for r in errors:
            report.append(f" {r['file']}: {r['error']}")
        report.append("")
    # Send email
    has_errors = len(errors) > 0
    subject = (
        f"{'X' if has_errors else 'OK'} "
        f"MEDICUS externi DB - "
        f"backup {len(backed_up)}/{len(fdb_all)}"
        f"{f', {len(errors)} errors' if has_errors else ''}"
    )
    send_mail(MAIL_TO, subject, "\n".join(report))
    print("\n" + "\n".join(report))
    if errors:
        # Non-zero exit for the scheduler after the report has been sent.
        raise RuntimeError(f"{len(errors)} backup(s) failed")
if __name__ == "__main__":
    # Script entry point — run the backup only when executed directly.
    main()
@@ -0,0 +1,156 @@
# test_import_FINAL.py detailní dokumentace
## Co skript dělá
Importuje PDF soubory (lékařské zprávy) do Medicus DB. Konkrétně:
1. Uloží fyzický soubor do **externí Firebird DB** (tabulka FILES)
2. Vloží nebo aktualizuje **dekurs pacienta** (tabulka DEKURS) s klikacím RTF odkazem na soubor
---
## Vstupní data (konfigurace nahoře)
```python
CESTA = r'u:\\' # adresář se zdrojovými PDF soubory
IDPAC = 9742 # ID pacienta v DB
DATUM = datetime.date(2026, 3, 18) # datum zprávy (ne dnešek!)
SOUBORY = [
{
'souborname': 'název souboru.pdf',
'prvnizavorka': 'typ zprávy', # např. "vyšetření"
'druhazavorka': 'poznámka', # volný text
'datum': DATUM,
},
...
]
```
Pozor: `DATUM` je datum zprávy (ne dnešek). Podle tohoto data se hledá existující dekurs.
---
## Rozhodovací logika — 3 scénáře
```
Poslední dekurs pacienta
├─ z JINÉHO dne / neexistuje
│ └─→ SCÉNÁŘ 3: vytvoří nový dekurs
└─ z DNEŠNÍHO dne (= DATUM)
├─ MÁ sekci "Vložené přílohy"
│ └─→ SCÉNÁŘ 1: přidá odkaz DO existující sekce
└─ NEMÁ sekci "Vložené přílohy"
└─→ SCÉNÁŘ 2: prepend nové sekce na začátek
```
Klíčová funkce pro detekci: `ma_sekci_prilohy(rtf)` hledá RTF string `Vlo\'9een\'e9 p\'f8\'edlohy:` (= „Vložené přílohy:" zakódováno win1250).
---
## Krok 1 — uložení souboru do ext DB
Volá `funkce_ext.zapis_file_ext(...)` pro každý soubor. Vrátí `fileid` (ID záznamu v tabulce FILES).
Z každého souboru se postaví:
- **bookmark entry** pro `{\info{\bookmarks ...}}` blok RTF:
`"2026-03-18 vyšetření: poznámka","Files:1234",9`
- **RTF pard** (klikací odkaz) pro tělo dekurzu:
`\pard\s10{\*\bkmkstart 0}\plain\cs32\f0\ul\fs20\cf1 2026-03-18 vyšetření: poznámka{\*\bkmkend 0}\par`
Číslo `cislo` začíná na 9 a roste po 7 (interní Medicus konvence). Index `poradi` (bkmkstart) začíná na 0 a roste po 1.
---
## Krok 2 — práce s dekurzem
### Scénář 1: přidání DO existující sekce (`pridat_do_sekce_prilohy`)
Situace: dnešní dekurs již má blok „Vložené přílohy" s nějakými odkazy.
Postup:
1. Spočítá počet existujících `Files:` odkazů v `{\info{\bookmarks}}` → to je index nového (`new_idx`)
2. Posune všechny `\bkmkstart N` / `\bkmkend N` kde `N >= new_idx` o +1 (uvolní místo)
3. Vloží nový `\pard` řádek **před** uzavírací `\pard\s10\plain\cs15\f0\fs20 \par` sekce
4. Vloží nový bookmark na pozici `new_idx` v `{\info{\bookmarks}}`
Výsledek: soubor se přidá na konec existujícího seznamu příloh, indexy zůstanou konzistentní.
### Scénář 2: prepend nové sekce (`merge_rtf_prepend`)
Situace: dnešní dekurs existuje, ale ještě nemá blok příloh.
Postup:
1. Posune všechny existující `\bkmkstart N` / `\bkmkend N` o +n_new (počet nových souborů)
2. Přidá nové bookmarky **na začátek** `{\info{\bookmarks}}` bloku
- Pokud `{\info{\bookmarks}}` neexistuje, vloží ho za `\deflang1029`
3. Vloží nové tělo (záhlaví „Vložené přílohy:" + řádky s odkazy) **před** první `\uc1\pard` těla stávajícího dekurzu
Výsledek: sekce příloh je viditelně nahoře, stávající text dekurzu zůstane pod ní.
### Scénář 3: nový dekurs
Situace: žádný dnešní dekurs neexistuje.
Sestaví RTF šablonu s:
- `{\info{\bookmarks ...}}` všechny bookmarky
- záhlaví „Vložené přílohy:" + klikací řádky
- uzavírací prázdný řádek
Vloží jako nový řádek do tabulky DEKURS s `iduzi=6, idprac=2, idodd=2` (Vladimír Buzalka, ordinace).
---
## RTF formát dekurzu
```rtf
{\rtf1\ansi\ansicpg1250\uc1\deff0\deflang1029
{\info{\bookmarks "2026-03-18 vyšetření: poznámka","Files:1234",9}}
{\fonttbl{\f0\fnil\fcharset238 Arial;} ...}
{\colortbl ;\red0\green0\blue255; ...}
{\stylesheet ... {\*\cs32\f0\ul\fs20\cf1 Odkaz;}}
\uc1\pard\s10\plain\cs20\f0\i\fs20 Vložené přílohy:\par
\pard\s10{\*\bkmkstart 0}\plain\cs32\f0\ul\fs20\cf1 2026-03-18 vyšetření: poznámka{\*\bkmkend 0}\par
\pard\s10\plain\cs15\f0\fs20 \par
}
```
- **cs20** = kurzíva (záhlaví sekce)
- **cs32** = podtržený modrý text (klikací odkaz)
- **cs15** = normální text
- `\cf1` = modrá barva (první v colortbl)
---
## Závislosti
| Import | Odkud | Co dělá |
|--------|-------|---------|
| `funkce_ext.zapis_file_ext` | `funkce_ext.py` | Uloží soubor do ext DB (tabulka FILES), vrátí fileid |
| `funkce.get_dekurs_id` | `funkce.py` | Vrátí nové ID pro INSERT do tabulky DEKURS |
| `fdb` | pip | Připojení k Firebird DB |
---
## Tabulky v DB
| Tabulka | DB | Popis |
|---------|----|-------|
| `DEKURS` | hlavní (`medicus.fdb`) | Záznamy dekurzu, pole `DEKURS` obsahuje RTF text |
| `FILES` | ext DB (`MEDICUS_FILES_*.fdb`) | Binární obsah souborů |
---
## Jak spustit
Skript se spouští jednorázově na Windows stroji s přístupem k Firebird DB. Před spuštěním:
1. Upravit `SOUBORY` seznam PDF souborů ke zpracování
2. Zkontrolovat `IDPAC`, `DATUM`, `CESTA`
3. Ověřit, že PDF soubory fyzicky existují na `CESTA`
Po spuštění ověřit v Medicus: karta pacienta → záložka Dekurzy → kliknout na odkaz.