Compare commits

..

14 Commits

Author SHA1 Message Date
aa5bc3d3c8 Add autofilter to all Excel report sheets
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:29:37 +01:00
d783882cf6 Centralize token.txt to project root
- Updated 35 scripts to read token.txt from project root
  using Path(__file__).resolve().parent.parent / "token.txt"
- Removed 6 duplicate token.txt files from subdirectories
- Single token.txt in project root serves all scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:27:38 +01:00
5992a01cdd Merge branch 'master' of https://gitea.buzalka.cz/administrator/medevio 2026-03-10 18:16:53 +01:00
040e6074ae Add token.txt for agenda report scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 18:16:40 +01:00
8b2f26d8e0 reporter 2026-03-10 18:13:02 +01:00
e142acaccd Merge remote-tracking branch 'origin/master' 2026-03-10 18:11:02 +01:00
af5749e3da notebook 2026-03-10 18:01:13 +01:00
46fbcdeff7 notebook 2026-03-10 17:39:47 +01:00
6478902172 Rename 871 test.py to Report_AgendaPozadavky.py
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 17:36:12 +01:00
68700ecd4d Merge branch 'claude/heuristic-lichterman' 2026-03-10 17:35:25 +01:00
e345e477d3 Rename report file and auto-delete previous reports
- Rename output to "YYYY-MM-DD HH-MM-SS Agenda + Požadavky.xlsx"
- Delete old reports before generating new one

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 17:34:29 +01:00
be0d41ad01 Fix agenda report and add MySQL sync for open requests
- 871 test.py: Switch auth from medevio_storage.json to token.txt,
  update MySQL port to 3306, add hyperlinks to Request_ID column,
  add better API error handling
- sync_open_requests.py: New script to sync doneAt/removedAt/updatedAt
  from Medevio API to MySQL for requests incorrectly marked as open
- check_request.py: Diagnostic script to inspect a single request via API
- check_mysql.py: Diagnostic script to inspect a single request in MySQL

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-10 07:44:19 +01:00
michaela.buzalkova
7da67294a1 sestra 2026-01-15 07:10:03 +01:00
bc44a65806 notebook 2026-01-14 17:28:41 +01:00
48 changed files with 833 additions and 114 deletions

Submodule .claude/worktrees/heuristic-lichterman deleted from be0d41ad01

3
.gitignore vendored
View File

@@ -8,6 +8,9 @@ __pycache__/
# PyCharm / IDE # PyCharm / IDE
.idea/ .idea/
# Claude worktrees
.claude/worktrees/
# OS # OS
.DS_Store .DS_Store
Thumbs.db Thumbs.db

View File

@@ -8,10 +8,11 @@ Reads Bearer token from token.txt (single line, token only).
import requests import requests
import pandas as pd import pandas as pd
import time import time
from pathlib import Path
from typing import List, Dict, Any from typing import List, Dict, Any
# CONFIG --------------------------------------------------------------------- # CONFIG ---------------------------------------------------------------------
TOKEN_FILE = "token.txt" # file with token (no "Bearer " prefix) TOKEN_FILE = str(Path(__file__).resolve().parent.parent / "token.txt") # centralized token
GRAPHQL_URL = "https://app.medevio.cz/graphql" GRAPHQL_URL = "https://app.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova" # adjust if needed CLINIC_SLUG = "mudr-buzalkova" # adjust if needed
LOCALE = "cs" LOCALE = "cs"

View File

@@ -6,7 +6,7 @@ from pathlib import Path
import requests # 👈 this is new import requests # 👈 this is new
# --- Settings ---------------------------------------------------- # --- Settings ----------------------------------------------------
TOKEN_PATH = Path("token.txt") # file contains ONLY the token, no "Bearer " TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
SHOW_FULL_TOKEN = False # set True if you want to print the full token SHOW_FULL_TOKEN = False # set True if you want to print the full token
# ----------------------------------------------------------------- # -----------------------------------------------------------------

View File

@@ -6,7 +6,7 @@ from pathlib import Path
import requests import requests
# --- Settings ---------------------------------------------------- # --- Settings ----------------------------------------------------
TOKEN_PATH = Path("token.txt") # file contains ONLY the token, no "Bearer " TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
# ----------------------------------------------------------------- # -----------------------------------------------------------------

View File

@@ -5,7 +5,7 @@ import json
from pathlib import Path from pathlib import Path
import requests import requests
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
# --- Try including `updatedAt` field directly --- # --- Try including `updatedAt` field directly ---

View File

@@ -5,7 +5,7 @@ import json
from pathlib import Path from pathlib import Path
import requests import requests
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_QUERY = r""" GRAPHQL_QUERY = r"""

View File

@@ -11,7 +11,7 @@ from datetime import datetime
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED" STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"

View File

@@ -11,7 +11,7 @@ from datetime import datetime
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 1000 BATCH_SIZE = 1000
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED" STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"

View File

@@ -3,7 +3,7 @@ import json
from pathlib import Path from pathlib import Path
# === Nastavení === # === Nastavení ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4" REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230\Medevio_přílohy") OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230\Medevio_přílohy")

View File

@@ -2,7 +2,7 @@ import requests
import json import json
from pathlib import Path from pathlib import Path
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4" REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"

View File

@@ -13,7 +13,7 @@ import shutil
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BASE_DIR = Path(r"u:\Dropbox\ordinace\Dokumentace_ke_zpracování\Medevio_přílohy") BASE_DIR = Path(r"u:\Dropbox\ordinace\Dokumentace_ke_zpracování\Medevio_přílohy")
BASE_DIR.mkdir(parents=True, exist_ok=True) BASE_DIR.mkdir(parents=True, exist_ok=True)

View File

@@ -20,7 +20,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -17,7 +17,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -16,7 +16,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -10,7 +10,7 @@ import time, socket
# =============================== # ===============================
# ⚙️ CONFIG # ⚙️ CONFIG
# =============================== # ===============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -10,7 +10,7 @@ import time
# ================================ # ================================
# ⚙️ CONFIGURATION # ⚙️ CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -18,7 +18,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -23,7 +23,7 @@ except AttributeError:
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -6,41 +6,15 @@ import requests
from pathlib import Path from pathlib import Path
from datetime import datetime from datetime import datetime
from dateutil import parser from dateutil import parser
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ================================
# 🛡 SAFE PRINT FOR CP1250 / Emoji
# ================================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# strip emoji + characters outside BMP
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# final fallback to ASCII only
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300
LIMIT = 500 # batch size / number of records
FULL_DOWNLOAD = False # 🔥 TOGGLE: False = last X, True = ALL batches
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",
@@ -52,7 +26,7 @@ DB_CONFIG = {
"cursorclass": pymysql.cursors.DictCursor, "cursorclass": pymysql.cursors.DictCursor,
} }
# ⭐ GraphQL query # ⭐ Query with lastMessage
GRAPHQL_QUERY = r""" GRAPHQL_QUERY = r"""
query ClinicRequestList2( query ClinicRequestList2(
$clinicSlug: String!, $clinicSlug: String!,
@@ -95,27 +69,23 @@ query ClinicRequestList2(
# ================================ # ================================
def read_token(path: Path) -> str: def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip() tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "): return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
return tok.split(" ", 1)[1]
return tok
# ================================ # ================================
# DATETIME PARSER # DATETIME PARSER (UTC → MySQL)
# ================================ # ================================
def to_mysql_dt(iso_str): def to_mysql_dt(iso_str):
if not iso_str: if not iso_str:
return None return None
try: try:
dt = parser.isoparse(iso_str) dt = parser.isoparse(iso_str) # ISO8601 → aware datetime (UTC)
dt = dt.astimezone() dt = dt.astimezone() # convert to local timezone
return dt.strftime("%Y-%m-%d %H:%M:%S") return dt.strftime("%Y-%m-%d %H:%M:%S")
except: except:
return None return None
# ================================ # ================================
# UPSERT # UPSERT REQUEST
# ================================ # ================================
def upsert(conn, r): def upsert(conn, r):
p = r.get("extendedPatient") or {} p = r.get("extendedPatient") or {}
@@ -147,7 +117,7 @@ def upsert(conn, r):
""" """
vals = ( vals = (
r.get("id"), r["id"],
r.get("displayTitle"), r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")), to_mysql_dt(r.get("createdAt")),
final_updated, final_updated,
@@ -163,16 +133,15 @@ def upsert(conn, r):
conn.commit() conn.commit()
# ================================ # ================================
# FETCH LAST 300 DONE REQUESTS # FETCH DONE REQUESTS (one batch)
# ================================ # ================================
def fetch_done(headers): def fetch_done(headers, offset):
vars = { vars = {
"clinicSlug": CLINIC_SLUG, "clinicSlug": CLINIC_SLUG,
"queueId": None, "queueId": None,
"queueAssignment": "ANY", "queueAssignment": "ANY",
"pageInfo": {"first": LIMIT, "offset": 0}, "pageInfo": {"first": LIMIT, "offset": offset},
"locale": "cs", "locale": "cs",
"state": "DONE", "state": "DONE",
} }
@@ -187,8 +156,7 @@ def fetch_done(headers):
r.raise_for_status() r.raise_for_status()
data = r.json()["data"]["requestsResponse"] data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", []) return data.get("patientRequests", []), data.get("count", 0)
# ================================ # ================================
# MAIN # MAIN
@@ -203,18 +171,40 @@ def main():
conn = pymysql.connect(**DB_CONFIG) conn = pymysql.connect(**DB_CONFIG)
safe_print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===") print(f"\n=== Sync CLOSED requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_done(headers) offset = 0
safe_print(f"📌 Requests returned: {len(requests_list)}") total_count = None
total_processed = 0
for r in requests_list: while True:
batch, count = fetch_done(headers, offset)
if total_count is None:
total_count = count
print(f"📡 Total DONE in Medevio: {count}")
if not batch:
break
print(f" • Processing batch offset={offset} size={len(batch)}")
for r in batch:
upsert(conn, r) upsert(conn, r)
total_processed += len(batch)
if not FULL_DOWNLOAD:
# process only last LIMIT records
break
# FULL DOWNLOAD → fetch next batch
offset += LIMIT
if offset >= count:
break
conn.close() conn.close()
safe_print("\n\u2705 DONE - latest closed requests synced.\n") print(f"\n✅ DONE — {total_processed} requests synced.\n")
# ================================
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -45,7 +45,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -47,7 +47,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -40,7 +40,7 @@ def safe_print(text: str):
# ============================== # ==============================
# CONFIG # CONFIG
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -48,7 +48,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {
@@ -190,9 +190,10 @@ def main():
# Build query for pozadavky # Build query for pozadavky
sql = """ sql = """
SELECT id, pacient_prijmeni, pacient_jmeno, createdAt SELECT id, pacient_prijmeni, pacient_jmeno, createdAt, updatedAt, attachmentsProcessed
FROM pozadavky FROM pozadavky
WHERE attachmentsProcessed IS NULL WHERE attachmentsProcessed IS NULL
OR updatedAt > attachmentsProcessed
""" """
params = [] params = []
if CREATED_AFTER: if CREATED_AFTER:

View File

@@ -0,0 +1,224 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
from collections import defaultdict
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# NOTE(review): database credentials are hardcoded and committed to the repo —
# move them to an environment variable / untracked secrets file and rotate
# this password.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,          # non-default MySQL port
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",  # full Unicode (Czech names, emoji in titles)
}
# Destination root for exported attachment folders (Windows/Dropbox path).
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
# ==============================
# 🔧 HELPERS
# ==============================
def sanitize_name(name: str) -> str:
    """Return *name* with characters Windows forbids in filenames replaced by "_".

    Covers <>:"/\\|?* plus ASCII control characters; surrounding whitespace
    is trimmed from the result.
    """
    forbidden = r'[<>:"/\\|?*\x00-\x1F]'
    cleaned = re.sub(forbidden, "_", name)
    return cleaned.strip()
def make_abbrev(title: str) -> str:
    """Build an uppercase abbreviation of *title*.

    Each alphabetic word contributes its first letter; all-digit words are
    kept whole (e.g. "Hello World 42" -> "HW42").  Returns "" for a falsy
    title (None or empty string).
    """
    if not title:
        return ""
    # Words = runs of ASCII letters, Czech accented letters, or digits.
    words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    # Idiomatic join-comprehension instead of string += in a loop.
    return "".join(w if w.isdigit() else w[0] for w in words).upper()
def clean_folder(folder: Path, valid_files: set):
    """Delete files in *folder* whose (sanitized) name is not in *valid_files*.

    Files carrying the "▲" marker prefix are never touched.  The committed
    code tested f.name.startswith("") — an always-true check caused by a
    lost "▲" character — which skipped every file and made the cleanup a
    no-op; the marker character is restored here (see the "▲" literals that
    survive elsewhere in this script).  Deletion failures are reported but
    do not abort the run.
    """
    if not folder.exists():
        return
    for f in folder.iterdir():
        if not f.is_file():
            continue
        # Never remove files flagged with the ▲ marker.
        if f.name.startswith("▲"):
            continue
        if sanitize_name(f.name) not in valid_files:
            print(f"🗑️ Removing unexpected file: {f.name}")
            try:
                f.unlink()
            except Exception as e:
                print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # dict rows for metadata
cur_blob = conn.cursor()                            # plain cursor for BLOB fetches

print("🔍 Loading only requests with NEW attachments…")
# Select attachment metadata only for requests that were never processed,
# or that received an attachment after their last attachmentsProcessed stamp.
cur_meta.execute("""
SELECT
p.id AS request_id,
p.displayTitle,
p.pacient_jmeno,
p.pacient_prijmeni,
p.updatedAt,
p.attachmentsProcessed,
d.filename,
d.created_at
FROM pozadavky p
JOIN medevio_downloads d ON d.request_id = p.id
LEFT JOIN (
SELECT request_id, MAX(created_at) AS last_attachment_ts
FROM medevio_downloads
GROUP BY request_id
) x ON x.request_id = p.id
WHERE p.attachmentsProcessed IS NULL
OR p.attachmentsProcessed < x.last_attachment_ts
ORDER BY p.updatedAt DESC;
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} attachment rows belonging to requests needing processing.\n")

# ==============================
# 🧠 PREPARE REQUEST GROUPING
# ==============================
# request_id → list of its attachment metadata rows
grouped = defaultdict(list)
for r in rows:
    grouped[r["request_id"]].append(r)
unique_request_ids = list(grouped.keys())
total_requests = len(unique_request_ids)
print(f"🔄 Processing {total_requests} requests needing updates…\n")

# ==============================
# 🧠 MAIN LOOP
# ==============================
index = 0
for req_id in unique_request_ids:
    index += 1
    pct = (index / total_requests) * 100
    print(f"\n[ {pct:5.1f}% ] Processing request {index}/{total_requests}{req_id}")
    req_rows = grouped[req_id]
    first = req_rows[0]

    # Folder name: "YYYY-MM-DD Surname, Name [ABBR] <request-id>"
    updated_at = first["updatedAt"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")
    prijmeni = sanitize_name(first["pacient_prijmeni"] or "Unknown")
    jmeno = sanitize_name(first["pacient_jmeno"] or "")
    abbr = make_abbrev(first["displayTitle"])
    desired_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")

    # Reuse any existing folder already carrying this request id (it may
    # have been renamed or flagged by a human), otherwise create a new one.
    main_folder = None
    for f in BASE_DIR.iterdir():
        if f.is_dir() and req_id in f.name:
            main_folder = f
            break
    if not main_folder:
        main_folder = BASE_DIR / desired_folder_name
        main_folder.mkdir(parents=True, exist_ok=True)

    # Expected file set for this request; anything else (except ▲ files)
    # is removed by clean_folder.
    valid_files = {sanitize_name(r["filename"]) for r in req_rows}
    clean_folder(main_folder, valid_files)

    # Track whether this run actually wrote at least one new file.
    added_new_file = False

    # DOWNLOAD MISSING FILES — fetch the BLOB only when the file is absent.
    for r in req_rows:
        filename = sanitize_name(r["filename"])
        dest_plain = main_folder / filename
        # "▲" restored here — the committed diff lost the marker character
        # and concatenated an empty string, making the flag check useless.
        dest_flag = main_folder / ("▲" + filename)
        # Skip if file already exists (plain or ▲-flagged copy).
        if dest_plain.exists() or dest_flag.exists():
            continue
        cur_blob.execute("""
SELECT file_content
FROM medevio_downloads
WHERE request_id=%s AND filename=%s
""", (req_id, r["filename"]))
        row = cur_blob.fetchone()
        if not row or not row[0]:
            continue
        with open(dest_plain, "wb") as f:
            f.write(row[0])
        print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
        added_new_file = True

    # ------------------------------------
    # 🟦 FOLDER ▲ LOGIC (IMPORTANT)
    # ------------------------------------
    # New content arrived → drop the "▲ already-reviewed" flag from the
    # folder name so staff notice it again.  No new files → never rename.
    if added_new_file:
        if "▲" in main_folder.name:
            new_name = main_folder.name.replace("▲", "").strip()
            new_path = main_folder.parent / new_name
            try:
                main_folder.rename(new_path)
                print(f"🔄 Folder flag ▲ removed → {new_name}")
                main_folder = new_path
            except Exception as e:
                print(f"⚠️ Could not rename folder: {e}")

    # Mark request as processed so the next incremental run skips it.
    cur_meta.execute(
        "UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id=%s",
        (req_id,)
    )
    conn.commit()

# ==============================
# 🏁 DONE
# ==============================
print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()
conn.close()

View File

@@ -0,0 +1,193 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# NOTE(review): database credentials are hardcoded and committed to the repo —
# move them to an environment variable / untracked secrets file and rotate
# this password.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,          # non-default MySQL port
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",  # full Unicode (Czech names, emoji in titles)
}
# Destination root for exported attachment folders (Windows/Dropbox path).
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
    """Strip surrounding whitespace and replace filename-invalid characters with "_"."""
    # Windows-forbidden characters plus ASCII control codes.
    bad_chars = re.compile(r'[<>:"/\\|?*\x00-\x1F]')
    return bad_chars.sub("_", name).strip()
def make_abbrev(title: str) -> str:
    """Abbreviate *title*: first letter of each word, digit runs kept whole, uppercased."""
    if not title:
        return ""
    pieces = []
    for word in re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title):
        # Numbers stay intact; words contribute their initial only.
        pieces.append(word if word.isdigit() else word[0])
    return "".join(pieces).upper()
# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
    """Delete files in *folder* that are not listed in *valid_files*.

    Files with the "▲" marker prefix are always kept.  The committed code
    compared against startswith("") — an always-true test caused by a lost
    "▲" character — so every file was skipped and nothing was ever cleaned;
    the marker is restored here (matching the "▲" literals that survive
    later in this script).  Failed deletions are logged, not fatal.
    """
    if not folder.exists():
        return
    for f in folder.iterdir():
        if not f.is_file():
            continue
        # ▲-flagged files are exempt from cleanup.
        if f.name.startswith("▲"):
            continue
        if sanitize_name(f.name) not in valid_files:
            print(f"🗑️ Removing unexpected file: {f.name}")
            try:
                f.unlink()
            except Exception as e:
                print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # dict rows for metadata
cur_blob = conn.cursor()                            # plain cursor for BLOB fetches

print("🔍 Loading ALL metadata without file_content…")
# ⭐ Load ALL metadata once (NO BLOBs) — file_content is fetched lazily
# below only for files that are actually missing on disk.
cur_meta.execute("""
SELECT
d.request_id,
d.filename,
d.created_at,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni,
p.displayTitle
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC;
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} metadata rows.\n")

# ==============================
# 🧠 PRE-GROUP METADATA
# ==============================
# request_id → all metadata rows for that request
grouped = {}
for row in rows:
    grouped.setdefault(row["request_id"], []).append(row)
unique_request_ids = list(grouped.keys())
total_requests = len(unique_request_ids)
print(f"🔄 Processing {total_requests} unique requests…\n")

# ==============================
# 🧠 MAIN LOOP
# ==============================
for idx, req_id in enumerate(unique_request_ids, start=1):
    pct = (idx / total_requests) * 100
    req_rows = grouped[req_id]
    first = req_rows[0]
    print(f"\n[ {pct:5.1f}% ] Processing request {idx}/{total_requests}{req_id}")

    # ======================
    # Build folder name: "YYYY-MM-DD Surname, Name [ABBR] <request-id>"
    # ======================
    updated_at = first["req_updated_at"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")
    prijmeni = sanitize_name(first["prijmeni"] or "Unknown")
    jmeno = sanitize_name(first["jmeno"] or "")
    abbr = make_abbrev(first["displayTitle"] or "")
    clean_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")

    # Reuse any folder already carrying this request id (possibly renamed
    # or ▲-flagged by a human), otherwise create a fresh one.
    existing_folder = None
    for f in BASE_DIR.iterdir():
        if f.is_dir() and req_id in f.name:
            existing_folder = f
            break
    main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
    main_folder.mkdir(parents=True, exist_ok=True)

    # ======================
    # Valid files for this request; anything else (except ▲ files) removed.
    # ======================
    valid_files = {sanitize_name(r["filename"]) for r in req_rows}
    clean_folder(main_folder, valid_files)

    # ======================
    # DOWNLOAD MISSING FILES → only now load BLOBs
    # ======================
    added_new_file = False
    for r in req_rows:
        filename = sanitize_name(r["filename"])
        dest_plain = main_folder / filename
        # "▲" restored — the committed diff lost the marker character,
        # concatenating an empty string and defeating the flag check.
        dest_marked = main_folder / ("▲" + filename)
        if dest_plain.exists() or dest_marked.exists():
            continue
        added_new_file = True
        # ⭐ Load BLOB only when needed
        cur_blob.execute("""
SELECT file_content
FROM medevio_downloads
WHERE request_id=%s AND filename=%s
""", (req_id, r["filename"]))
        row = cur_blob.fetchone()
        if not row or not row[0]:
            continue
        with open(dest_plain, "wb") as f:
            f.write(row[0])
        print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")

    # ======================
    # Folder-level ▲ logic: new content drops the "already reviewed" flag.
    # ======================
    if added_new_file and "▲" in main_folder.name:
        new_name = main_folder.name.replace("▲", "").strip()
        new_path = main_folder.parent / new_name
        try:
            main_folder.rename(new_path)
            main_folder = new_path
            print(f"🔄 Folder flag ▲ removed → {new_name}")
        except Exception as e:
            print(f"⚠️ Could not rename folder: {e}")

cur_blob.close()
cur_meta.close()
conn.close()
print("\n🎯 Export complete.\n")

View File

@@ -0,0 +1,29 @@
[2025-12-01 06:37:41] === START pravidelného běhu ===
[2025-12-01 06:37:42] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
[2025-12-01 06:37:44] ↳ PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py return code: 0
[2025-12-01 06:37:44] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
[2025-12-01 06:37:48] ↳ PRAVIDELNE_1_ReadLast300DonePozadavku.py return code: 0
[2025-12-01 06:37:48] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
[2025-12-01 06:37:49] ↳ PRAVIDELNE_2_ReadPoznamky.py return code: 0
[2025-12-01 06:37:50] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
[2025-12-01 06:37:51] ↳ PRAVIDELNE_3_StahniKomunikaci.py return code: 0
[2025-12-01 06:37:52] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
[2025-12-01 06:37:53] ↳ PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py return code: 0
[2025-12-01 06:37:53] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
[2025-12-01 06:38:42] ↳ PRAVIDELNE_5_SaveToFileSystem incremental.py return code: 0
[2025-12-01 06:38:43] === KONEC pravidelného běhu ===
[2025-12-02 07:04:34] === START pravidelného běhu ===
[2025-12-02 07:04:34] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
[2025-12-02 07:04:35] ↳ return code: 0
[2025-12-02 07:04:35] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
[2025-12-02 07:04:39] ↳ return code: 0
[2025-12-02 07:04:39] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
[2025-12-02 07:04:40] ↳ return code: 0
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
[2025-12-02 07:05:28] ↳ return code: 0
[2025-12-02 07:05:28] === KONEC pravidelného běhu ===

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -20,7 +20,7 @@ import time
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -35,7 +35,7 @@ def safe_print(text: str):
# ================================ # ================================
# 🔧 CONFIG # 🔧 CONFIG
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500 BATCH_SIZE = 500

View File

@@ -21,7 +21,7 @@ import argparse
# ============================== # ==============================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ============================== # ==============================
TOKEN_PATH = Path("../10ReadPozadavky/token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -21,7 +21,7 @@ except AttributeError:
# ================================ # ================================
# 🔧 CONFIGURATION # 🔧 CONFIGURATION
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100 BATCH_SIZE = 100

View File

@@ -35,7 +35,7 @@ def safe_print(text: str):
# ================================ # ================================
# 🔧 CONFIG # 🔧 CONFIG
# ================================ # ================================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500 BATCH_SIZE = 500

View File

@@ -41,7 +41,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION (UPDATED TO 192.168.1.50) # 🔧 CONFIGURATION (UPDATED TO 192.168.1.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"

View File

@@ -19,7 +19,7 @@ except AttributeError:
# ============================== # ==============================
# CONFIG (.50) # CONFIG (.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.50", "host": "192.168.1.50",

View File

@@ -40,7 +40,7 @@ def safe_print(text: str):
# ============================== # ==============================
# 🔧 CONFIGURATION (.50) # 🔧 CONFIGURATION (.50)
# ============================== # ==============================
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova" CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -13,7 +13,7 @@ except:
# === CONFIG === # === CONFIG ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek který sledujeme REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek který sledujeme

View File

@@ -13,7 +13,7 @@ except:
# === CONFIG === # === CONFIG ===
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek

View File

@@ -16,7 +16,7 @@ except:
# === KONFIGURACE === # === KONFIGURACE ===
# --- Medevio API --- # --- Medevio API ---
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
# --- ZPRACOVÁNÍ --- # --- ZPRACOVÁNÍ ---

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -30,7 +30,7 @@ CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",
"port": 3307, "port": 3306,
"user": "root", "user": "root",
"password": "Vlado9674+", "password": "Vlado9674+",
"database": "medevio", "database": "medevio",
@@ -40,26 +40,18 @@ DB_CONFIG = {
EXPORT_DIR = Path(r"u:\Dropbox\Ordinace\Reporty") EXPORT_DIR = Path(r"u:\Dropbox\Ordinace\Reporty")
EXPORT_DIR.mkdir(exist_ok=True, parents=True) EXPORT_DIR.mkdir(exist_ok=True, parents=True)
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
xlsx_path = EXPORT_DIR / f"{timestamp} Agenda + Pozadavky (Merged).xlsx" # Delete previous reports
for old in EXPORT_DIR.glob("* Agenda + Požadavky.xlsx"):
old.unlink()
print(f"🗑️ Deleted old report: {old.name}")
timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
xlsx_path = EXPORT_DIR / f"{timestamp} Agenda + Požadavky.xlsx"
# ==================== LOAD TOKEN ==================== # ==================== LOAD TOKEN ====================
def load_gateway_token(storage_path="medevio_storage.json"): TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
path = Path(storage_path) gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip()
if not path.exists():
raise SystemExit(f"❌ Storage file not found: {path}")
with path.open("r", encoding="utf-8") as f:
state = json.load(f)
token = next(
(c["value"] for c in state["cookies"] if c["name"] == "gateway-access-token"),
None,
)
if not token:
raise SystemExit("❌ gateway-access-token not found in storage file.")
return token
gateway_token = load_gateway_token()
headers = { headers = {
"content-type": "application/json", "content-type": "application/json",
@@ -80,8 +72,18 @@ thin_border = Border(
) )
REQUEST_URL_TEMPLATE = "https://my.medevio.cz/mudr-buzalkova/klinika/pozadavky?pozadavek={}"
link_font = Font(color="0563C1", underline="single")
def format_ws(ws, df): def format_ws(ws, df):
"""Apply unified formatting to a worksheet.""" """Apply unified formatting to a worksheet."""
# Find Request_ID column index (1-based)
req_id_col = None
columns = list(df.columns)
if "Request_ID" in columns:
req_id_col = columns.index("Request_ID") + 1
for col_idx in range(1, len(df.columns) + 1): for col_idx in range(1, len(df.columns) + 1):
col_letter = get_column_letter(col_idx) col_letter = get_column_letter(col_idx)
cell = ws.cell(row=1, column=col_idx) cell = ws.cell(row=1, column=col_idx)
@@ -96,7 +98,12 @@ def format_ws(ws, df):
cell.border = thin_border cell.border = thin_border
if r_idx % 2 == 0: if r_idx % 2 == 0:
cell.fill = alt_fill cell.fill = alt_fill
# Add hyperlink to Request_ID cells
if req_id_col and cell.column == req_id_col and cell.value:
cell.hyperlink = REQUEST_URL_TEMPLATE.format(cell.value)
cell.font = link_font
ws.freeze_panes = "A2" ws.freeze_panes = "A2"
ws.auto_filter.ref = ws.dimensions
# ==================== 1⃣ LOAD AGENDA (API) ==================== # ==================== 1⃣ LOAD AGENDA (API) ====================
@@ -138,7 +145,12 @@ payload = {
r = requests.post(GRAPHQL_URL, headers=headers, data=json.dumps(payload)) r = requests.post(GRAPHQL_URL, headers=headers, data=json.dumps(payload))
r.raise_for_status() r.raise_for_status()
reservations = r.json()["data"]["reservations"] resp = r.json()
if "errors" in resp or "data" not in resp:
print("❌ API response:")
print(json.dumps(resp, indent=2, ensure_ascii=False))
raise SystemExit("API call failed - check token or query.")
reservations = resp["data"]["reservations"]
rows = [] rows = []
for r in reservations: for r in reservations:

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Check one request in MySQL.

Diagnostic helper: fetches a single row from the ``pozadavky`` table by its
UUID and dumps it as pretty-printed JSON for manual inspection.
"""
import pymysql
import json

# NOTE(review): credentials are hard-coded; consider env vars or a config file.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}

REQUEST_ID = "6b46b5a8-b080-4821-86b0-39adabeec86b"

conn = pymysql.connect(**DB_CONFIG)
try:
    # Ensure the connection is released even if the query raises.
    with conn.cursor() as cur:
        cur.execute("SELECT * FROM pozadavky WHERE id = %s", (REQUEST_ID,))
        row = cur.fetchone()
finally:
    conn.close()

if row:
    # Convert datetime objects to strings for JSON serialization
    for k, v in row.items():
        if hasattr(v, 'isoformat'):
            row[k] = v.isoformat()
    # default=str is a safety net for any remaining non-JSON types (Decimal, bytes).
    print(json.dumps(row, indent=2, ensure_ascii=False, default=str))
else:
    print(f"Not found: {REQUEST_ID}")

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Quick check: fetch one request from Medevio API and print all fields."""
import json
import requests
from pathlib import Path

# Token is shared project-wide and lives in the repo root (one level up).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"
REQUEST_ID = "6b46b5a8-b080-4821-86b0-39adabeec86b"

token = TOKEN_PATH.read_text(encoding="utf-8").strip()

headers = {
    "content-type": "application/json",
    "authorization": f"Bearer {token}",
    "origin": "https://my.medevio.cz",
    "referer": "https://my.medevio.cz/",
}

# Query with as many fields as possible
QUERY = """
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
  request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
    id
    displayTitle(locale: $locale)
    createdAt
    updatedAt
    doneAt
    removedAt
    userNote
    eventType
    extendedPatient(clinicSlug: $clinicSlug) {
      name
      surname
      dob
      identificationNumber
      insuranceCompanyObject { shortName }
    }
    ecrfFilledData(locale: $locale) {
      name
      groups {
        label
        fields { name label type value }
      }
    }
  }
}
"""

payload = {
    "operationName": "GetPatientRequest2",
    "query": QUERY,
    "variables": {
        "requestId": REQUEST_ID,
        "clinicSlug": CLINIC_SLUG,
        "locale": "cs",
    },
}

r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=30)
# Surface HTTP-level failures (e.g. 401 on an expired token) explicitly
# instead of crashing inside r.json() on a non-JSON error body.
if r.status_code != 200:
    print(f"HTTP {r.status_code}: {r.text[:500]}")
    raise SystemExit(1)
print(json.dumps(r.json(), indent=2, ensure_ascii=False))

View File

@@ -0,0 +1,176 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Sync open requests: checks each request marked as open in MySQL (doneAt IS NULL
AND removedAt IS NULL) against the Medevio API. If the API shows the request is
closed (doneAt) or removed (removedAt), updates MySQL accordingly.
"""
import json
import sys
import time
import requests
import pymysql
from pathlib import Path
from datetime import datetime
# ==============================
# UTF-8 output (Windows friendly)
# ==============================
try:
    # Python 3.7+: reconfigure the existing streams in place.
    sys.stdout.reconfigure(encoding="utf-8")
    sys.stderr.reconfigure(encoding="utf-8")
except AttributeError:
    # Older interpreters lack reconfigure(); wrap the raw buffers instead.
    import io
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding="utf-8")

# ==============================
# DRY RUN - set to True to only print what would be updated, False to actually update
# ==============================
DRY_RUN = False

# ==============================
# CONFIG
# ==============================
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"

# Token is shared project-wide and lives in the repo root, one level above
# this script's directory. Read once at import time.
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
gateway_token = TOKEN_PATH.read_text(encoding="utf-8").strip()

headers = {
    "content-type": "application/json",
    "authorization": f"Bearer {gateway_token}",
    "origin": "https://my.medevio.cz",
    "referer": "https://my.medevio.cz/",
}

# NOTE(review): DB credentials are hard-coded; consider env vars or a config file.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}

# Minimal query: only the status timestamps this sync needs.
GRAPHQL_QUERY = """
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!) {
  request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
    id
    doneAt
    removedAt
    updatedAt
  }
}
"""
def fix_datetime(dt_str):
    """Parse an ISO-8601 timestamp from the API (e.g. ``...Z``) into a datetime.

    Returns None for empty/None input or any unparseable value, so callers can
    pass API fields straight through without pre-checking.
    """
    if not dt_str:
        return None
    try:
        # fromisoformat() before Python 3.11 rejects a trailing 'Z';
        # map it to an explicit UTC offset first.
        return datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
    except (ValueError, TypeError, AttributeError):
        # Unparseable or non-string input -> treat as "no timestamp".
        # (Narrowed from a bare `except Exception` so real bugs still surface.)
        return None
def fetch_request(request_id):
    """Fetch doneAt/removedAt/updatedAt for one request from the Medevio API.

    Retries the POST up to 3 times on network errors. Returns the ``request``
    dict from the GraphQL response, or None on any failure (network, HTTP
    status, or GraphQL error) after printing a diagnostic line.
    """
    payload = {
        "operationName": "GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {
            "requestId": request_id,
            "clinicSlug": CLINIC_SLUG,
        },
    }
    for attempt in range(3):
        try:
            r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=30)
            break
        except requests.exceptions.RequestException as e:
            # RequestException is the base class — it already covers
            # ConnectionError and Timeout.
            print(f" ⚠️ Attempt {attempt+1}/3 failed: {e}")
            if attempt < 2:
                # No point sleeping after the final failed attempt.
                time.sleep(2)
    else:
        print(f" ❌ Connection failed after 3 attempts for {request_id}")
        return None
    if r.status_code != 200:
        print(f" ❌ HTTP {r.status_code} for {request_id}")
        return None
    data = r.json()
    if "errors" in data:
        print(f" ❌ API error for {request_id}: {data['errors']}")
        return None
    # GraphQL may return "data": null; guard against .get() on None.
    return (data.get("data") or {}).get("request")
# ==============================
# MAIN
# ==============================
conn = pymysql.connect(**DB_CONFIG)

# 1) Read all open requests from MySQL
with conn.cursor() as cur:
    cur.execute(
        "SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno "
        "FROM pozadavky WHERE doneAt IS NULL AND removedAt IS NULL"
    )
    open_requests = cur.fetchall()

mode = "DRY RUN" if DRY_RUN else "LIVE"
print(f"🔧 Mode: {mode}")
print(f"📋 Found {len(open_requests)} open requests in MySQL.\n")

updated = 0  # rows whose status we copied from the API
errors = 0   # rows we could not check (network/API failure)

# 2) Re-check each open row against the API; copy back closed/removed status.
for i, req in enumerate(open_requests, 1):
    rid = req["id"]
    name = f"{req.get('pacient_prijmeni', '')} {req.get('pacient_jmeno', '')}".strip()
    title = req.get("displayTitle", "")
    print(f"[{i}/{len(open_requests)}] {name} {title} ({rid})")

    api_data = fetch_request(rid)
    if api_data is None:
        # fetch_request already printed the reason; count and move on.
        errors += 1
        continue

    api_done = api_data.get("doneAt")
    api_removed = api_data.get("removedAt")
    api_updated = api_data.get("updatedAt")

    if api_done or api_removed:
        # API says the request is no longer open — mirror that into MySQL.
        done_dt = fix_datetime(api_done)
        removed_dt = fix_datetime(api_removed)
        updated_dt = fix_datetime(api_updated)
        status = "DONE" if api_done else "REMOVED"
        if DRY_RUN:
            print(f" 🔍 Would update → {status} (doneAt={api_done}, removedAt={api_removed})")
        else:
            with conn.cursor() as cur:
                cur.execute(
                    "UPDATE pozadavky SET doneAt = %s, removedAt = %s, updatedAt = %s WHERE id = %s",
                    (done_dt, removed_dt, updated_dt, rid),
                )
            # Commit per row so progress survives an interrupted run.
            conn.commit()
            print(f" ✅ Updated → {status}")
        updated += 1
    else:
        print(f" ⏳ Still open")

    # Be gentle with the API
    time.sleep(1)

conn.close()

# 3) Summary
print(f"\n{'='*50}")
print(f"📊 Total open in MySQL: {len(open_requests)}")
print(f"✅ Updated (closed/removed): {updated}")
print(f"⏳ Still open: {len(open_requests) - updated - errors}")
print(f"❌ Errors: {errors}")
View File

@@ -69,7 +69,7 @@ payload = {
"calendarIds": [CALENDAR_ID], "calendarIds": [CALENDAR_ID],
"clinicSlug": CLINIC_SLUG, "clinicSlug": CLINIC_SLUG,
"since": since_iso, "since": since_iso,
"until": "2025-11-30T21:59:59.999Z", "until": until_iso,
"locale": "cs", "locale": "cs",
"emptyCalendarIds": False, "emptyCalendarIds": False,
}, },

View File

@@ -1 +0,0 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH