Compare commits

..

8 Commits

Author SHA1 Message Date
aa5bc3d3c8 Add autofilter to all Excel report sheets
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:29:37 +01:00
d783882cf6 Centralize token.txt to project root
- Updated 35 scripts to read token.txt from project root
  using Path(__file__).resolve().parent.parent / "token.txt"
- Removed 6 duplicate token.txt files from subdirectories
- Single token.txt in project root serves all scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:27:38 +01:00
5992a01cdd Merge branch 'master' of https://gitea.buzalka.cz/administrator/medevio 2026-03-10 18:16:53 +01:00
040e6074ae Add token.txt for agenda report scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:16:40 +01:00
8b2f26d8e0 reporter 2026-03-10 18:13:02 +01:00
e142acaccd Merge remote-tracking branch 'origin/master' 2026-03-10 18:11:02 +01:00
michaela.buzalkova
7da67294a1 sestra 2026-01-15 07:10:03 +01:00
bc44a65806 notebook 2026-01-14 17:28:41 +01:00
46 changed files with 530 additions and 103 deletions

Submodule .claude/worktrees/heuristic-lichterman deleted from e345e477d3

View File

@@ -8,10 +8,11 @@ Reads Bearer token from token.txt (single line, token only).
import requests import requests
import pandas as pd import pandas as pd
import time import time
from pathlib import Path
from typing import List, Dict, Any from typing import List, Dict, Any
# CONFIG --------------------------------------------------------------------- # CONFIG ---------------------------------------------------------------------
TOKEN_FILE = "token.txt" # file with token (no "Bearer " prefix) TOKEN_FILE = str(Path(__file__).resolve().parent.parent / "token.txt") # centralized token
GRAPHQL_URL = "https://app.medevio.cz/graphql" GRAPHQL_URL = "https://app.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova" # adjust if needed CLINIC_SLUG = "mudr-buzalkova" # adjust if needed
LOCALE = "cs" LOCALE = "cs"

View File

@@ -6,7 +6,7 @@ from pathlib import Path
import requests # 👈 this is new import requests # 👈 this is new
# --- Settings ---------------------------------------------------- # --- Settings ----------------------------------------------------
TOKEN_PATH = Path("token.txt") # file contains ONLY the token, no "Bearer " TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
SHOW_FULL_TOKEN = False # set True if you want to print the full token SHOW_FULL_TOKEN = False # set True if you want to print the full token
# ----------------------------------------------------------------- # -----------------------------------------------------------------

View File

@@ -6,7 +6,7 @@ from pathlib import Path
import requests import requests
# --- Settings ---------------------------------------------------- # --- Settings ----------------------------------------------------
TOKEN_PATH = Path("token.txt") # file contains ONLY the token, no "Bearer " TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
# ----------------------------------------------------------------- # -----------------------------------------------------------------

View File

@@ -5,7 +5,7 @@ import json
from pathlib import Path from pathlib import Path
import requests import requests
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
# --- Try including `updatedAt` field directly --- # --- Try including `updatedAt` field directly ---

View File

@@ -5,7 +5,7 @@ import json
from pathlib import Path from pathlib import Path
import requests import requests
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_QUERY = r""" GRAPHQL_QUERY = r"""

View File

@@ -11,7 +11,7 @@ from datetime import datetime
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED" STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"

View File

@@ -11,7 +11,7 @@ from datetime import datetime
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 1000 BATCH_SIZE = 1000
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED" STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"

View File

@@ -3,7 +3,7 @@ import json
from pathlib import Path from pathlib import Path
# === Nastavení === # === Nastavení ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4" REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230\Medevio_přílohy") OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230\Medevio_přílohy")

View File

@@ -2,7 +2,7 @@ import requests
import json import json
from pathlib import Path from pathlib import Path
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4" REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"

View File

@@ -13,7 +13,7 @@ import shutil
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BASE_DIR = Path(r"u:\Dropbox\ordinace\Dokumentace_ke_zpracování\Medevio_přílohy") BASE_DIR = Path(r"u:\Dropbox\ordinace\Dokumentace_ke_zpracování\Medevio_přílohy")
BASE_DIR.mkdir(parents=True, exist_ok=True) BASE_DIR.mkdir(parents=True, exist_ok=True)

View File

@@ -20,7 +20,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -17,7 +17,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -16,7 +16,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -10,7 +10,7 @@ import time, socket
# =============================== # ===============================
# ⚙️ CONFIG # ⚙️ CONFIG
# =============================== # ===============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -10,7 +10,7 @@ import time
# ================================ # ================================
# ⚙️ CONFIGURATION # ⚙️ CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -18,7 +18,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -23,7 +23,7 @@ except AttributeError:
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -6,41 +6,15 @@ import requests
from pathlib import Path from pathlib import Path
from datetime import datetime from datetime import datetime
from dateutil import parser from dateutil import parser
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ================================
# 🛡 SAFE PRINT FOR CP1250 / Emoji
# ================================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# strip emoji + characters outside BMP
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# final fallback to ASCII only
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300
LIMIT = 500 # batch size / number of records
FULL_DOWNLOAD = False # 🔥 TOGGLE: False = last X, True = ALL batches
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",
@@ -52,7 +26,7 @@ DB_CONFIG = {
"cursorclass": pymysql.cursors.DictCursor, "cursorclass": pymysql.cursors.DictCursor,
} }
# ⭐ GraphQL query # ⭐ Query with lastMessage
GRAPHQL_QUERY = r""" GRAPHQL_QUERY = r"""
query ClinicRequestList2( query ClinicRequestList2(
$clinicSlug: String!, $clinicSlug: String!,
@@ -95,27 +69,23 @@ query ClinicRequestList2(
# ================================ # ================================
def read_token(path: Path) -> str: def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip() tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "): return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
return tok.split(" ", 1)[1]
return tok
# ================================ # ================================
# DATETIME PARSER # DATETIME PARSER (UTC → MySQL)
# ================================ # ================================
def to_mysql_dt(iso_str): def to_mysql_dt(iso_str):
if not iso_str: if not iso_str:
return None return None
try: try:
dt = parser.isoparse(iso_str) dt = parser.isoparse(iso_str) # ISO8601 → aware datetime (UTC)
dt = dt.astimezone() dt = dt.astimezone() # convert to local timezone
return dt.strftime("%Y-%m-%d %H:%M:%S") return dt.strftime("%Y-%m-%d %H:%M:%S")
except: except:
return None return None
# ================================ # ================================
# UPSERT # UPSERT REQUEST
# ================================ # ================================
def upsert(conn, r): def upsert(conn, r):
p = r.get("extendedPatient") or {} p = r.get("extendedPatient") or {}
@@ -147,7 +117,7 @@ def upsert(conn, r):
""" """
vals = ( vals = (
r.get("id"), r["id"],
r.get("displayTitle"), r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")), to_mysql_dt(r.get("createdAt")),
final_updated, final_updated,
@@ -163,16 +133,15 @@ def upsert(conn, r):
conn.commit() conn.commit()
# ================================ # ================================
# FETCH LAST 300 DONE REQUESTS # FETCH DONE REQUESTS (one batch)
# ================================ # ================================
def fetch_done(headers): def fetch_done(headers, offset):
vars = { vars = {
"clinicSlug": CLINIC_SLUG, "clinicSlug": CLINIC_SLUG,
"queueId": None, "queueId": None,
"queueAssignment": "ANY", "queueAssignment": "ANY",
"pageInfo": {"first": LIMIT, "offset": 0}, "pageInfo": {"first": LIMIT, "offset": offset},
"locale": "cs", "locale": "cs",
"state": "DONE", "state": "DONE",
} }
@@ -187,8 +156,7 @@ def fetch_done(headers):
r.raise_for_status() r.raise_for_status()
data = r.json()["data"]["requestsResponse"] data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", []) return data.get("patientRequests", []), data.get("count", 0)
# ================================ # ================================
# MAIN # MAIN
@@ -203,18 +171,40 @@ def main():
conn = pymysql.connect(**DB_CONFIG) conn = pymysql.connect(**DB_CONFIG)
safe_print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===") print(f"\n=== Sync CLOSED requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_done(headers) offset = 0
safe_print(f"📌 Requests returned: {len(requests_list)}") total_count = None
total_processed = 0
for r in requests_list: while True:
upsert(conn, r) batch, count = fetch_done(headers, offset)
if total_count is None:
total_count = count
print(f"📡 Total DONE in Medevio: {count}")
if not batch:
break
print(f" • Processing batch offset={offset} size={len(batch)}")
for r in batch:
upsert(conn, r)
total_processed += len(batch)
if not FULL_DOWNLOAD:
# process only last LIMIT records
break
# FULL DOWNLOAD → fetch next batch
offset += LIMIT
if offset >= count:
break
conn.close() conn.close()
safe_print("\n\u2705 DONE - latest closed requests synced.\n") print(f"\n✅ DONE — {total_processed} requests synced.\n")
# ================================
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -45,7 +45,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -47,7 +47,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -40,7 +40,7 @@ def safe_print(text: str):
# ============================== # ==============================
# CONFIG # CONFIG
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -48,7 +48,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {
@@ -190,9 +190,10 @@ def main():
# Build query for pozadavky # Build query for pozadavky
sql = """ sql = """
SELECT id, pacient_prijmeni, pacient_jmeno, createdAt SELECT id, pacient_prijmeni, pacient_jmeno, createdAt, updatedAt, attachmentsProcessed
FROM pozadavky FROM pozadavky
WHERE attachmentsProcessed IS NULL WHERE attachmentsProcessed IS NULL
OR updatedAt > attachmentsProcessed
""" """
params = [] params = []
if CREATED_AFTER: if CREATED_AFTER:

View File

@@ -0,0 +1,224 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
from collections import defaultdict
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# MySQL connection parameters for the local "medevio" mirror database.
# NOTE(review): credentials are hard-coded in plain text and committed to the
# repository — move them to an environment variable or a config file kept
# outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}
# Destination folder for exported attachments (Dropbox share on drive U:);
# created up-front so the main loop can assume it exists.
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
# ==============================
# 🔧 HELPERS
# ==============================
# Prefix marking files that need manual review; clean_folder() must never
# delete them.
# NOTE(review): the committed text had an empty literal ("") here, which makes
# startswith("") always true and silently disables cleanup. The surrounding
# comments and prints refer to "▲ files", so the glyph was most likely lost
# during export — restored as "▲"; confirm against the deployed script.
FLAG_MARKER = "▲"

def sanitize_name(name: str) -> str:
    """Return *name* with characters invalid in Windows filenames replaced.

    Replaces <>:"/\\|?* and control characters (0x00-0x1F) with "_" and
    strips surrounding whitespace.
    """
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()

def make_abbrev(title: str) -> str:
    """Create an upper-case abbreviation from *title*.

    Numeric words are kept whole, other words contribute their first
    letter — e.g. "Kontrola tlaku 2024" -> "KT2024". Falsy input -> "".
    """
    if not title:
        return ""
    words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    return "".join(w if w.isdigit() else w[0] for w in words).upper()

def clean_folder(folder: Path, valid_files: set):
    """Remove unexpected files from *folder*, sparing ▲-flagged ones.

    A file survives when its name starts with FLAG_MARKER (manual-review
    flag) or when its sanitized name is in *valid_files*. A missing folder
    is a no-op; deletion failures are reported, not raised.
    """
    if not folder.exists():
        return
    for f in folder.iterdir():
        if f.is_file():
            # ▲-flagged files are user-managed; never touch them.
            if f.name.startswith(FLAG_MARKER):
                continue
            sanitized = sanitize_name(f.name)
            if sanitized not in valid_files:
                print(f"🗑️ Removing unexpected file: {f.name}")
                try:
                    f.unlink()
                except Exception as e:
                    print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # metadata rows as dicts
cur_blob = conn.cursor()                            # plain cursor, BLOB fetches only

print("🔍 Loading only requests with NEW attachments…")

# Attachment metadata (no BLOB columns) for every request that was never
# processed, or that gained an attachment newer than attachmentsProcessed.
cur_meta.execute("""
    SELECT
        p.id AS request_id,
        p.displayTitle,
        p.pacient_jmeno,
        p.pacient_prijmeni,
        p.updatedAt,
        p.attachmentsProcessed,
        d.filename,
        d.created_at
    FROM pozadavky p
    JOIN medevio_downloads d ON d.request_id = p.id
    LEFT JOIN (
        SELECT request_id, MAX(created_at) AS last_attachment_ts
        FROM medevio_downloads
        GROUP BY request_id
    ) x ON x.request_id = p.id
    WHERE p.attachmentsProcessed IS NULL
       OR p.attachmentsProcessed < x.last_attachment_ts
    ORDER BY p.updatedAt DESC;
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} attachment rows belonging to requests needing processing.\n")

# ==============================
# 🧠 PREPARE REQUEST GROUPING
# ==============================
# request_id -> list of that request's attachment metadata rows
grouped = defaultdict(list)
for r in rows:
    grouped[r["request_id"]].append(r)

unique_request_ids = list(grouped.keys())
total_requests = len(unique_request_ids)
print(f"🔄 Processing {total_requests} requests needing updates…\n")

# ==============================
# 🧠 MAIN LOOP
# ==============================
index = 0
for req_id in unique_request_ids:
    index += 1
    pct = (index / total_requests) * 100
    # NOTE(review): counter and request id are juxtaposed with no separator in
    # this f-string — a character (e.g. "→") may have been lost; confirm.
    print(f"\n[ {pct:5.1f}% ] Processing request {index}/{total_requests}{req_id}")

    req_rows = grouped[req_id]
    first = req_rows[0]

    # Build folder name: "<YYYY-MM-DD> <Surname>, <Name> [<ABBR>] <request-id>"
    updated_at = first["updatedAt"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")
    prijmeni = sanitize_name(first["pacient_prijmeni"] or "Unknown")
    jmeno = sanitize_name(first["pacient_jmeno"] or "")
    abbr = make_abbrev(first["displayTitle"])
    desired_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")

    # Reuse any existing folder whose name contains the request id (the human-
    # readable prefix may differ from desired_folder_name).
    main_folder = None
    for f in BASE_DIR.iterdir():
        if f.is_dir() and req_id in f.name:
            main_folder = f
            break
    if not main_folder:
        main_folder = BASE_DIR / desired_folder_name
        main_folder.mkdir(parents=True, exist_ok=True)

    # Sanitized filenames we expect to find in the folder.
    valid_files = {sanitize_name(r["filename"]) for r in req_rows}

    # Clean unexpected non-▲ files
    clean_folder(main_folder, valid_files)

    # Track if ANY new files were downloaded
    added_new_file = False

    # DOWNLOAD MISSING FILES
    for r in req_rows:
        filename = sanitize_name(r["filename"])
        dest_plain = main_folder / filename
        # NOTE(review): "" + filename is a no-op, making dest_flag identical to
        # dest_plain; the intended "▲" prefix appears lost from this literal —
        # confirm against the deployed script.
        dest_flag = main_folder / ("" + filename)

        # Skip if file already exists (plain or ▲)
        if dest_plain.exists() or dest_flag.exists():
            continue

        # Fetch the BLOB content only for files actually missing on disk.
        cur_blob.execute("""
            SELECT file_content
            FROM medevio_downloads
            WHERE request_id=%s AND filename=%s
        """, (req_id, r["filename"]))
        row = cur_blob.fetchone()
        if not row or not row[0]:
            continue

        with open(dest_plain, "wb") as f:
            f.write(row[0])
        print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
        added_new_file = True

    # ------------------------------------
    # 🟦 FOLDER ▲ LOGIC (IMPORTANT)
    # ------------------------------------
    if added_new_file:
        # If folder contains ▲ in its name → remove it
        # NOTE(review): '"" in name' is always True and replace("", "") is a
        # no-op — the "▲" literal seems stripped here as well; confirm.
        if "" in main_folder.name:
            new_name = main_folder.name.replace("", "").strip()
            new_path = main_folder.parent / new_name
            try:
                main_folder.rename(new_path)
                print(f"🔄 Folder flag ▲ removed → {new_name}")
                main_folder = new_path
            except Exception as e:
                print(f"⚠️ Could not rename folder: {e}")
    else:
        # NO new files → NEVER rename folder
        pass

    # Mark request as processed so the metadata query skips it until a newer
    # attachment arrives (see WHERE clause above).
    cur_meta.execute(
        "UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id=%s",
        (req_id,)
    )
    conn.commit()

# ==============================
# 🏁 DONE
# ==============================
print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()
conn.close()

View File

@@ -0,0 +1,193 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# MySQL connection parameters for the local "medevio" mirror database.
# NOTE(review): credentials are hard-coded in plain text and committed to the
# repository — move them out of version control (env var / config file).
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}
# Destination folder for exported attachments; created up-front so the rest of
# the script can assume it exists.
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
# Prefix marking files that must never be auto-deleted (manual-review flag).
# NOTE(review): the committed text had an empty literal ("") here, which turns
# startswith("") into an always-true check and silently disables cleanup; the
# prints and comments talk about "▲" flags, so the glyph was presumably
# dropped during export. Restored — verify against the running copy.
FLAG_PREFIX = "▲"

def sanitize_name(name: str) -> str:
    """Replace characters invalid in filenames with "_" and strip whitespace.

    Covers <>:"/\\|?* plus control characters 0x00-0x1F.
    """
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()

def make_abbrev(title: str) -> str:
    """Abbreviate *title*: digits kept whole, first letter per word, upper-cased."""
    if not title:
        return ""
    words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    abbr = ""
    for w in words:
        abbr += w if w.isdigit() else w[0]
    return abbr.upper()

# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
    """Remove files whose sanitized names are not in *valid_files*.

    ▲-prefixed files are always kept; a non-existent folder is a no-op;
    deletion errors are reported but not raised.
    """
    if not folder.exists():
        return
    for f in folder.iterdir():
        if f.is_file():
            # Flagged files are user-managed; never delete them.
            if f.name.startswith(FLAG_PREFIX):
                continue
            sanitized = sanitize_name(f.name)
            if sanitized not in valid_files:
                print(f"🗑️ Removing unexpected file: {f.name}")
                try:
                    f.unlink()
                except Exception as e:
                    print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # dict rows for metadata
cur_blob = conn.cursor()                            # plain cursor, BLOBs only

print("🔍 Loading ALL metadata without file_content…")

# ⭐ Load ALL metadata once (NO BLOBs)
cur_meta.execute("""
    SELECT
        d.request_id,
        d.filename,
        d.created_at,
        p.updatedAt AS req_updated_at,
        p.pacient_jmeno AS jmeno,
        p.pacient_prijmeni AS prijmeni,
        p.displayTitle
    FROM medevio_downloads d
    JOIN pozadavky p ON d.request_id = p.id
    ORDER BY p.updatedAt DESC;
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} metadata rows.\n")

# ==============================
# 🧠 PRE-GROUP METADATA
# ==============================
# Build dictionary: request_id → all metadata rows for that request
grouped = {}
for row in rows:
    grouped.setdefault(row["request_id"], []).append(row)

unique_request_ids = list(grouped.keys())
total_requests = len(unique_request_ids)
print(f"🔄 Processing {total_requests} unique requests…\n")

# ==============================
# 🧠 MAIN LOOP
# ==============================
for idx, req_id in enumerate(unique_request_ids, start=1):
    pct = (idx / total_requests) * 100
    req_rows = grouped[req_id]
    first = req_rows[0]
    # NOTE(review): counter and request id are juxtaposed with no separator —
    # a character (e.g. "→") may have been lost from this f-string; confirm.
    print(f"\n[ {pct:5.1f}% ] Processing request {idx}/{total_requests}{req_id}")

    # ======================
    # Build folder name
    # ======================
    # "<YYYY-MM-DD> <Surname>, <Name> [<ABBR>] <request-id>"
    updated_at = first["req_updated_at"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")
    prijmeni = sanitize_name(first["prijmeni"] or "Unknown")
    jmeno = sanitize_name(first["jmeno"] or "")
    abbr = make_abbrev(first["displayTitle"] or "")
    clean_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")

    # Detect existing folder — matched by request id anywhere in the name, so
    # renamed/flagged folders are still found.
    existing_folder = None
    for f in BASE_DIR.iterdir():
        if f.is_dir() and req_id in f.name:
            existing_folder = f
            break

    main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
    main_folder.mkdir(parents=True, exist_ok=True)

    # ======================
    # Valid files for this request
    # ======================
    valid_files = {sanitize_name(r["filename"]) for r in req_rows}

    # Clean unexpected files
    clean_folder(main_folder, valid_files)

    # ======================
    # DOWNLOAD MISSING FILES → only now load BLOBs
    # ======================
    added_new_file = False
    for r in req_rows:
        filename = sanitize_name(r["filename"])
        dest_plain = main_folder / filename
        # NOTE(review): "" + filename is a no-op so dest_marked == dest_plain;
        # the intended "▲" prefix appears to have been lost — confirm.
        dest_marked = main_folder / ("" + filename)

        if dest_plain.exists() or dest_marked.exists():
            continue

        added_new_file = True

        # ⭐ Load BLOB only when needed
        cur_blob.execute("""
            SELECT file_content
            FROM medevio_downloads
            WHERE request_id=%s AND filename=%s
        """, (req_id, r["filename"]))
        row = cur_blob.fetchone()
        if not row or not row[0]:
            continue

        with open(dest_plain, "wb") as f:
            f.write(row[0])
        print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")

    # ======================
    # Folder-level ▲ logic
    # ======================
    # NOTE(review): '"" in name' is always True and replace("", "") is a no-op
    # — the "▲" literal was presumably stripped from this block too; confirm.
    if added_new_file and "" in main_folder.name:
        new_name = main_folder.name.replace("", "").strip()
        new_path = main_folder.parent / new_name
        try:
            main_folder.rename(new_path)
            main_folder = new_path
            print(f"🔄 Folder flag ▲ removed → {new_name}")
        except Exception as e:
            print(f"⚠️ Could not rename folder: {e}")

cur_blob.close()
cur_meta.close()
conn.close()
print("\n🎯 Export complete.\n")

View File

@@ -0,0 +1,29 @@
[2025-12-01 06:37:41] === START pravidelného běhu ===
[2025-12-01 06:37:42] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
[2025-12-01 06:37:44] ↳ PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py return code: 0
[2025-12-01 06:37:44] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
[2025-12-01 06:37:48] ↳ PRAVIDELNE_1_ReadLast300DonePozadavku.py return code: 0
[2025-12-01 06:37:48] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
[2025-12-01 06:37:49] ↳ PRAVIDELNE_2_ReadPoznamky.py return code: 0
[2025-12-01 06:37:50] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
[2025-12-01 06:37:51] ↳ PRAVIDELNE_3_StahniKomunikaci.py return code: 0
[2025-12-01 06:37:52] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
[2025-12-01 06:37:53] ↳ PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py return code: 0
[2025-12-01 06:37:53] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
[2025-12-01 06:38:42] ↳ PRAVIDELNE_5_SaveToFileSystem incremental.py return code: 0
[2025-12-01 06:38:43] === KONEC pravidelného běhu ===
[2025-12-02 07:04:34] === START pravidelného běhu ===
[2025-12-02 07:04:34] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
[2025-12-02 07:04:35] ↳ return code: 0
[2025-12-02 07:04:35] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
[2025-12-02 07:04:39] ↳ return code: 0
[2025-12-02 07:04:39] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
[2025-12-02 07:05:28] ↳ return code: 0
[2025-12-02 07:05:28] === KONEC pravidelného běhu ===

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -20,7 +20,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -35,7 +35,7 @@ def safe_print(text: str):
# ================================ # ================================
# 🔧 CONFIG # 🔧 CONFIG
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500 BATCH_SIZE = 500

View File

@@ -21,7 +21,7 @@ import argparse
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("../10ReadPozadavky/token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -21,7 +21,7 @@ except AttributeError:
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -35,7 +35,7 @@ def safe_print(text: str):
# ================================ # ================================
# 🔧 CONFIG # 🔧 CONFIG
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500 BATCH_SIZE = 500

View File

@@ -41,7 +41,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION (UPDATED TO 192.168.1.50) # 🔧 CONFIGURATION (UPDATED TO 192.168.1.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -19,7 +19,7 @@ except AttributeError:
# ============================== # ==============================
# CONFIG (.50) # CONFIG (.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.50", "host": "192.168.1.50",

View File

@@ -40,7 +40,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION (.50) # 🔧 CONFIGURATION (.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -13,7 +13,7 @@ except:
# === CONFIG === # === CONFIG ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek který sledujeme REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek který sledujeme

View File

@@ -13,7 +13,7 @@ except:
# === CONFIG === # === CONFIG ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek

View File

@@ -16,7 +16,7 @@ except:
# === KONFIGURACE === # === KONFIGURACE ===
# --- Medevio API --- # --- Medevio API ---
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
# --- ZPRACOVÁNÍ --- # --- ZPRACOVÁNÍ ---

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -50,11 +50,7 @@ timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
xlsx_path = EXPORT_DIR / f"{timestamp} Agenda + Požadavky.xlsx" xlsx_path = EXPORT_DIR / f"{timestamp} Agenda + Požadavky.xlsx"
# ==================== LOAD TOKEN ==================== # ==================== LOAD TOKEN ====================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
if not TOKEN_PATH.exists():
TOKEN_PATH = Path(__file__).parent / "token.txt"
if not TOKEN_PATH.exists():
raise SystemExit(f"❌ token.txt not found")
gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip() gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip()
headers = { headers = {
@@ -107,6 +103,7 @@ def format_ws(ws, df):
cell.hyperlink = REQUEST_URL_TEMPLATE.format(cell.value) cell.hyperlink = REQUEST_URL_TEMPLATE.format(cell.value)
cell.font = link_font cell.font = link_font
ws.freeze_panes = "A2" ws.freeze_panes = "A2"
ws.auto_filter.ref = ws.dimensions
# ==================== 1⃣ LOAD AGENDA (API) ==================== # ==================== 1⃣ LOAD AGENDA (API) ====================

View File

@@ -6,7 +6,7 @@ import json
import requests import requests
from pathlib import Path from pathlib import Path
TOKEN_PATH = Path(__file__).parent / "token.txt" TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
REQUEST_ID = "6b46b5a8-b080-4821-86b0-39adabeec86b" REQUEST_ID = "6b46b5a8-b080-4821-86b0-39adabeec86b"

View File

@@ -37,9 +37,7 @@ DRY_RUN = False
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
TOKEN_PATH = Path(__file__).parent / "token.txt" TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
if not TOKEN_PATH.exists():
raise SystemExit("❌ token.txt not found")
gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip() gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip()
headers = { headers = {

View File

@@ -69,7 +69,7 @@ payload = {
"calendarIds": [CALENDAR_ID], "calendarIds": [CALENDAR_ID],
"clinicSlug": CLINIC_SLUG, "clinicSlug": CLINIC_SLUG,
"since": since_iso, "since": since_iso,
"until": "2025-11-30T21:59:59.999Z", "until": until_iso,
"locale": "cs", "locale": "cs",
"emptyCalendarIds": False, "emptyCalendarIds": False,
}, },

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH