This commit is contained in:
2025-11-16 10:59:38 +01:00
parent 585e38284b
commit 105b10a06e
3 changed files with 405 additions and 22 deletions

View File

@@ -5,14 +5,14 @@ import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300 # download the latest 300 requests
LIMIT = 300 # stáhneme posledních 300 ukončených požadavků
DB_CONFIG = {
"host": "192.168.1.76",
@@ -24,21 +24,22 @@ DB_CONFIG = {
"cursorclass": pymysql.cursors.DictCursor,
}
# ⭐ Ověřený dotaz s lastMessage
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!,
$state: PatientRequestState
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
pageInfo: $pageInfo,
state: $state
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
@@ -53,6 +54,9 @@ query ClinicRequestGrid_ListPatientRequestsForClinic2(
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
@@ -61,29 +65,46 @@ query ClinicRequestGrid_ListPatientRequestsForClinic2(
# ================================
# TOKEN
# ================================
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# DATE PARSER
# DATETIME PARSER (UTC → MySQL)
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
dt = parser.isoparse(iso_str) # ISO8601 → aware datetime (UTC)
dt = dt.astimezone() # převede na lokální čas (CET/CEST)
return dt.strftime("%Y-%m-%d %H:%M:%S")
except:
return None
# ================================
# UPSERT
# UPSERT WITH MERGED UPDATED TIME
# ================================
def upsert(conn, r):
p = (r.get("extendedPatient") or {})
p = r.get("extendedPatient") or {}
# API pole
api_updated = to_mysql_dt(r.get("updatedAt"))
# poslední zpráva
last_msg = r.get("lastMessage") or {}
msg_at = to_mysql_dt(last_msg.get("createdAt"))
# vybereme novější čas
def max_dt(a, b):
if a and b:
return max(a, b)
return a or b
final_updated = max_dt(api_updated, msg_at)
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
@@ -98,36 +119,39 @@ def upsert(conn, r):
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
to_mysql_dt(r.get("updatedAt")),
final_updated,
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# FETCH LATEST 300 REQUESTS
# FETCH LAST 300 DONE REQUESTS
# ================================
def fetch_latest_requests(headers):
def fetch_done(headers):
vars = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": LIMIT, "offset": 0},
"locale": "cs",
"state": "DONE" # ALL STATES (ACTIVE, DONE, REMOVED)
"state": "DONE",
}
payload = {
"operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": vars,
}
@@ -151,17 +175,16 @@ def main():
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Downloading last {LIMIT} requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_latest_requests(headers)
print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_done(headers)
print(f"📌 Requests returned: {len(requests_list)}")
for r in requests_list:
upsert(conn, r)
conn.close()
print("\n✅ Done. Latest requests synced.\n")
print("\n✅ DONE - latest closed requests synced.\n")
if __name__ == "__main__":

191
Testy/18 Test.py Normal file
View File

@@ -0,0 +1,191 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")   # file holding the API bearer token (with or without the "Bearer " prefix)
CLINIC_SLUG = "mudr-buzalkova"   # clinic identifier used by the GraphQL query
LIMIT = 300  # fetch the latest 300 finished (DONE) requests
# MySQL connection settings for the target database.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# environment variables or a config file kept out of version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts keyed by column name
}
# ⭐ Verified query including lastMessage (its createdAt feeds the merged
# "last activity" timestamp computed in upsert()).
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# TOKEN
# ================================
def read_token(path: Path) -> str:
    """Read the API token from *path*, dropping an optional "Bearer " prefix."""
    raw = path.read_text(encoding="utf-8").strip()
    if not raw.startswith("Bearer "):
        return raw
    return raw.split(" ", 1)[1]
# ================================
# DATETIME PARSER (UTC → MySQL)
# ================================
def to_mysql_dt(iso_str):
    """Convert an ISO-8601 timestamp string to a MySQL DATETIME string.

    The input (e.g. "2024-01-02T10:00:00Z") is parsed as a timezone-aware
    datetime, converted to the machine's local time zone (CET/CEST), and
    formatted as "YYYY-MM-DD HH:MM:SS".

    Returns None for empty/None input or for values that cannot be parsed.
    """
    if not iso_str:
        return None
    try:
        dt = parser.isoparse(iso_str)  # ISO8601 → aware datetime (UTC)
        dt = dt.astimezone()           # convert to local time (CET/CEST)
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except (ValueError, OverflowError, TypeError):
        # Narrowed from a bare `except:` so that unrelated errors
        # (KeyboardInterrupt, SystemExit, programming bugs) still propagate.
        return None
# ================================
# UPSERT WITH MERGED UPDATED TIME
# ================================
def upsert(conn, r):
    """Insert or update one request row in the `pozadavky` table.

    The stored updatedAt is the newer of the API's own updatedAt and the
    createdAt of the request's last message, so the row reflects the most
    recent activity on the request.
    """
    patient = r.get("extendedPatient") or {}

    # Candidate "last activity" timestamps. Both are MySQL-formatted
    # strings ("%Y-%m-%d %H:%M:%S"), which sort lexicographically in
    # chronological order, so max() on the strings is safe.
    api_updated = to_mysql_dt(r.get("updatedAt"))
    msg_at = to_mysql_dt((r.get("lastMessage") or {}).get("createdAt"))

    if api_updated and msg_at:
        final_updated = max(api_updated, msg_at)
    else:
        final_updated = api_updated or msg_at

    sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
    values = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        final_updated,
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        patient.get("name"),
        patient.get("surname"),
        patient.get("identificationNumber"),
    )
    with conn.cursor() as cur:
        cur.execute(sql, values)
    conn.commit()  # one commit per row keeps each request durable independently
# ================================
# FETCH LAST 300 DONE REQUESTS
# ================================
def fetch_done(headers, timeout=30):
    """POST the GraphQL query and return the list of DONE patient requests.

    Args:
        headers: HTTP headers including the Authorization bearer token.
        timeout: request timeout in seconds (new, backward-compatible;
            previously the call could hang indefinitely with no timeout).

    Returns:
        list of patient-request dicts (possibly empty).

    Raises:
        requests.HTTPError: on a non-2xx response (via raise_for_status).
    """
    # Renamed from `vars`, which shadowed the builtin of the same name.
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": LIMIT, "offset": 0},
        "locale": "cs",
        "state": "DONE",
    }
    payload = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload,
                      headers=headers, timeout=timeout)
    r.raise_for_status()
    data = r.json()["data"]["requestsResponse"]
    return data.get("patientRequests", [])
# ================================
# MAIN
# ================================
def main():
    """Sync the latest DONE requests from the Medevio API into MySQL."""
    token = read_token(TOKEN_PATH)
    auth_headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    connection = pymysql.connect(**DB_CONFIG)
    print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    rows = fetch_done(auth_headers)
    print(f"📌 Requests returned: {len(rows)}")
    for row in rows:
        upsert(connection, row)

    connection.close()
    print("\n✅ DONE - latest closed requests synced.\n")


if __name__ == "__main__":
    main()

169
Testy/19 Test.py Normal file
View File

@@ -0,0 +1,169 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# MySQL connection settings for the source database.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# environment variables or a config file kept out of version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}
# Root folder under which one sub-folder per request is created.
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP1")
BASE_DIR.mkdir(parents=True, exist_ok=True)  # ensure it exists before any writes
def sanitize_name(name: str) -> str:
    """Return *name* with characters illegal in filenames replaced by "_".

    Covers Windows-reserved punctuation plus ASCII control characters,
    then strips surrounding whitespace.
    """
    illegal = re.compile(r'[<>:"/\\|?*\x00-\x1F]')
    return illegal.sub("_", name).strip()
# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
    """Remove all files in folder that are NOT present in valid_files.

    `valid_files` is expected to contain *sanitized* filenames, so each
    on-disk name is sanitized before the membership test. Deletion
    failures are reported but do not abort the run.
    """
    if not folder.exists():
        return
    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        if sanitize_name(entry.name) in valid_files:
            continue
        print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            print(f"⚠️ Could not delete {entry}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # dict rows for metadata queries
cur_blob = conn.cursor()  # tuple rows; used only for lazy, per-file blob fetches
print("🔍 Loading metadata from DB (FAST)…")
# Load only lightweight metadata up front; blobs are fetched later one by one.
# Ordered by updatedAt DESC so the first row seen per request is the newest.
cur_meta.execute("""
SELECT d.id AS download_id,
d.request_id,
d.filename,
d.created_at,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} attachment records.\n")
# ==============================
# 🧠 MAIN LOOP
# ==============================
# Process each request_id exactly once (rows may contain several attachments
# per request). Reusing cur_meta inside the loop is safe because `rows` was
# fully fetched above.
processed_requests = set()
for r in rows:
    req_id = r["request_id"]
    if req_id in processed_requests:
        continue
    processed_requests.add(req_id)
    # ========== FETCH ALL VALID FILES FOR THIS REQUEST ==========
    cur_meta.execute(
        "SELECT filename FROM medevio_downloads WHERE request_id=%s",
        (req_id,)
    )
    # Set of *sanitized* filenames that are allowed to exist in the folder.
    valid_files = {sanitize_name(row["filename"]) for row in cur_meta.fetchall()}
    # ========== FOLDER NAME BASED ON UPDATEDAT ==========
    # Folder name: "<date> <surname>, <name> <request-id>"; falls back to
    # today's date / "Unknown" when DB fields are NULL.
    updated_at = r["req_updated_at"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")
    prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
    jmeno = sanitize_name(r["jmeno"] or "")
    folder_name = f"{date_str} {prijmeni}, {jmeno} {req_id}"
    folder_name = sanitize_name(folder_name)
    main_folder = BASE_DIR / folder_name
    # ========== FIND OLD FOLDER (DUPLICATE) ==========
    # Any folder that contains "_<req_id>" and is not main_folder is duplicate
    # NOTE(review): this is a substring match — an id that is a prefix of
    # another id would false-positive; confirm request ids cannot overlap.
    possible_dups = [
        f for f in BASE_DIR.iterdir()
        if f.is_dir() and req_id in f.name and f != main_folder
    ]
    # ========== MERGE DUPLICATES ==========
    # Old folders (e.g. named with an older updatedAt) are drained into the
    # current main folder, then removed.
    for dup in possible_dups:
        print(f"♻️ Merging duplicate folder: {dup.name}")
        # 1) Clean unexpected files in dup
        clean_folder(dup, valid_files)
        # 2) Move files from dup to main folder (existing targets win)
        main_folder.mkdir(parents=True, exist_ok=True)
        for f in dup.iterdir():
            if f.is_file():
                target = main_folder / f.name
                if not target.exists():
                    f.rename(target)
        # 3) Remove the duplicate folder
        # NOTE(review): ignore_errors=True means rmtree never raises here,
        # so this except branch is effectively dead (kept as-is).
        try:
            shutil.rmtree(dup, ignore_errors=True)
        except Exception as e:
            print(f"⚠️ Could not delete duplicate folder {dup}: {e}")
    # ========== CLEAN MAIN FOLDER ==========
    clean_folder(main_folder, valid_files)
    # ========== DOWNLOAD MISSING FILES ==========
    main_folder.mkdir(parents=True, exist_ok=True)
    for filename in valid_files:
        dest = main_folder / filename
        if dest.exists():
            continue
        # fetch blob only now
        # NOTE(review): `filename` here is the *sanitized* name, but the DB
        # column stores the raw name — blobs whose original filename contains
        # characters replaced by sanitize_name will never match and are
        # silently skipped; consider keeping a sanitized→original mapping.
        cur_blob.execute(
            "SELECT file_content FROM medevio_downloads "
            "WHERE request_id=%s AND filename=%s",
            (req_id, filename)
        )
        row = cur_blob.fetchone()
        if not row:
            continue
        content = row[0]
        if not content:  # skip NULL/empty blobs
            continue
        with open(dest, "wb") as f:
            f.write(content)
        print(f"💾 Wrote: {dest.relative_to(BASE_DIR)}")
print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()
conn.close()