Compare commits


10 Commits

Author               SHA1        Message    Date
                     65c90750dc  reporter   2025-12-07 12:11:50 +01:00
                     4c68c095db  Z230       2025-12-05 09:49:59 +01:00
                     eeea6740ac  Z230       2025-12-02 16:50:06 +01:00
                     c8e58a0246  reporter   2025-12-02 07:31:16 +01:00
                     f159120175  reporter   2025-12-02 06:24:27 +01:00
                     f8ada463a2  reporter   2025-11-30 20:21:01 +01:00
                     ac16eedde9  notebook   2025-11-21 07:14:49 +01:00
                     8fce419afd  Z230       2025-11-20 10:07:30 +01:00
                     7c185fec68  Z230       2025-11-19 19:59:44 +01:00
michaela.buzalkova   ea32ea0bc1  pohoda     2025-11-17 11:28:31 +01:00
38 changed files with 1124 additions and 3963 deletions

.idea/Medevio.iml generated

@@ -4,7 +4,7 @@
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.13 (Medevio)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

.idea/misc.xml generated

@@ -3,5 +3,5 @@
<component name="Black">
<option name="sdkName" value="Python 3.12 (Medevio)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (Medevio)" project-jdk-type="Python SDK" />
</project>


@@ -8,6 +8,18 @@ from datetime import datetime, timezone
import time
from dateutil import parser
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
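# Note: sys.stdout.reconfigure()/sys.stderr.reconfigure() exist since Python 3.7,
# hence the AttributeError fallback above; this block should run before the first print().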
# ================================
# 🔧 CONFIGURATION
# ================================
@@ -16,7 +28,7 @@ CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",


@@ -6,16 +6,44 @@ import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ================================
# 🛡 SAFE PRINT FOR CP1250 / Emoji
# ================================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# strip emoji + characters outside BMP
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# final fallback to ASCII only
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
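# Illustrative example (hypothetical input): on a cp1250 console,
# safe_print("📌 Requests returned: 12") prints " Requests returned: 12" — the emoji,
# lying outside the BMP, is stripped; on a UTF-8 console the text is printed unchanged.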
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300 # download the last 300 completed (DONE) requests
LIMIT = 300
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
@@ -24,7 +52,7 @@ DB_CONFIG = {
"cursorclass": pymysql.cursors.DictCursor,
}
# ⭐ Verified query including lastMessage
# ⭐ GraphQL query
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
@@ -71,33 +99,31 @@ def read_token(path: Path) -> str:
return tok.split(" ", 1)[1]
return tok
# ================================
# DATETIME PARSER (UTC → MySQL)
# DATETIME PARSER
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str) # ISO8601 → aware datetime (UTC)
dt = dt.astimezone() # convert to local time (CET/CEST)
dt = parser.isoparse(iso_str)
dt = dt.astimezone()
return dt.strftime("%Y-%m-%d %H:%M:%S")
except:
return None
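# Worked example (assuming the local timezone is CET, UTC+1):
# to_mysql_dt("2025-12-07T11:11:50Z") -> "2025-12-07 12:11:50"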
# ================================
# UPSERT WITH MERGED UPDATED TIME
# UPSERT
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
# API field
api_updated = to_mysql_dt(r.get("updatedAt"))
# last message
last_msg = r.get("lastMessage") or {}
msg_at = to_mysql_dt(last_msg.get("createdAt"))
# pick the newer of the two timestamps
def max_dt(a, b):
if a and b:
return max(a, b)
@@ -137,6 +163,7 @@ def upsert(conn, r):
conn.commit()
# ================================
# FETCH LAST 300 DONE REQUESTS
# ================================
@@ -162,6 +189,7 @@ def fetch_done(headers):
data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", [])
# ================================
# MAIN
# ================================
@@ -175,17 +203,18 @@ def main():
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
safe_print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_done(headers)
print(f"📌 Requests returned: {len(requests_list)}")
safe_print(f"📌 Requests returned: {len(requests_list)}")
for r in requests_list:
upsert(conn, r)
conn.close()
print("\n DONE - latest closed requests synced.\n")
safe_print("\n\u2705 DONE - latest closed requests synced.\n")
# ================================
if __name__ == "__main__":
main()


@@ -12,6 +12,35 @@ import pymysql
from datetime import datetime
from pathlib import Path
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# strip emoji + anything above BMP
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# final ASCII fallback
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# 🔧 CONFIGURATION
@@ -21,7 +50,7 @@ CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql"
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
@@ -31,29 +60,30 @@ DB_CONFIG = {
}
from datetime import datetime
# ==============================
# 🕒 DATETIME FIXER
# ==============================
def fix_datetime(dt_str):
"""Convert ISO 8601 string with 'Z' or ms into MySQL DATETIME format."""
if not dt_str:
return None
try:
# Remove trailing Z and parse flexible ISO format
return datetime.fromisoformat(dt_str.replace("Z", "").replace("+00:00", ""))
except Exception:
return None
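# Worked example: fix_datetime("2025-01-05T09:30:00Z") -> datetime(2025, 1, 5, 9, 30);
# the trailing "Z"/"+00:00" is dropped, so the result is a naive datetime suitable for a MySQL DATETIME column.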
# ✅ Optional: limit which requests to process
CREATED_AFTER = "2025-01-01" # set "" to disable
# Optional filter
CREATED_AFTER = "2025-01-01"
# ==============================
# 🧮 HELPERS
# ==============================
def read_token(p: Path) -> str:
"""Read Bearer token from file."""
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok.split(" ", 1)[1]
return tok
@@ -101,7 +131,7 @@ def fetch_questionnaire(headers, request_id, clinic_slug):
}
r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=40)
if r.status_code != 200:
print(f"❌ HTTP {r.status_code} for {request_id}: {r.text}")
safe_print(f"❌ HTTP {r.status_code} for {request_id}: {r.text}")
return None
return r.json().get("data", {}).get("request")
@@ -118,10 +148,10 @@ def insert_questionnaire(cur, req):
updated_at = fix_datetime(req.get("updatedAt"))
cur.execute("""
INSERT INTO medevio_questionnaires (
INSERT INTO medevio_questionnaires (
request_id, created_at, updated_at, user_note, ecrf_json
)
VALUES (%s,%s,%s,%s,%s)
)
VALUES (%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
updated_at = VALUES(updated_at),
user_note = VALUES(user_note),
@@ -133,8 +163,9 @@ VALUES (%s,%s,%s,%s,%s)
updated_at,
req.get("userNote"),
json.dumps(ecrf_data, ensure_ascii=False),
))
print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
))
safe_print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
# ==============================
@@ -149,6 +180,8 @@ def main():
}
conn = pymysql.connect(**DB_CONFIG)
# load list of requests
with conn.cursor() as cur:
sql = """
SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
@@ -163,26 +196,30 @@ def main():
rows = cur.fetchall()
print(f"📋 Found {len(rows)} requests needing questionnaire check.")
safe_print(f"📋 Found {len(rows)} requests needing questionnaire check.")
# process each one
for i, row in enumerate(rows, 1):
req_id = row["id"]
print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")
safe_print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")
req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
if not req:
print(" ⚠️ No questionnaire data found.")
safe_print(" ⚠️ No questionnaire data found.")
continue
with conn.cursor() as cur:
insert_questionnaire(cur, req)
cur.execute("UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s", (req_id,))
cur.execute(
"UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s",
(req_id,)
)
conn.commit()
time.sleep(0.6) # polite pacing
time.sleep(0.6)
conn.close()
print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")
safe_print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")
# ==============================


@@ -15,6 +15,34 @@ import pymysql
from pathlib import Path
from datetime import datetime
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc or not enc.lower().startswith("utf"):
# strip emoji + characters outside BMP for Task Scheduler (CP1250)
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# fallback pure ASCII
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# 🔧 CONFIGURATION
@@ -22,7 +50,7 @@ import time
TOKEN_PATH = Path("token.txt")
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
@@ -94,7 +122,7 @@ def fetch_messages(headers, request_id):
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print("❌ HTTP", r.status_code, "for request", request_id)
safe_print(f"❌ HTTP {r.status_code} for request {request_id}")
return []
return r.json().get("data", {}).get("messages", []) or []
@@ -158,7 +186,7 @@ def insert_download(cur, req_id, msg, existing_ids):
return
if attachment_id in existing_ids:
return # skip duplicates
return
url = mr.get("downloadUrl") or mr.get("url")
if not url:
@@ -169,7 +197,7 @@ def insert_download(cur, req_id, msg, existing_ids):
r.raise_for_status()
data = r.content
except Exception as e:
print("⚠️ Failed to download:", e)
safe_print(f"⚠️ Failed to download: {e}")
return
filename = url.split("/")[-1].split("?")[0]
@@ -216,7 +244,7 @@ def main():
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
print(f"📦 Already downloaded attachments: {len(existing_ids)}\n")
safe_print(f"📦 Already downloaded attachments: {len(existing_ids)}\n")
# ---- Select pozadavky needing message sync
sql = """
@@ -229,12 +257,12 @@ def main():
cur.execute(sql)
requests_to_process = cur.fetchall()
print(f"📋 Found {len(requests_to_process)} pozadavků requiring message sync.\n")
safe_print(f"📋 Found {len(requests_to_process)} pozadavků requiring message sync.\n")
# ---- Process each pozadavek
# ---- Process each record
for idx, row in enumerate(requests_to_process, 1):
req_id = row["id"]
print(f"[{idx}/{len(requests_to_process)}] Processing {req_id}")
safe_print(f"[{idx}/{len(requests_to_process)}] Processing {req_id}")
messages = fetch_messages(headers, req_id)
@@ -248,11 +276,11 @@ def main():
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
print(f"{len(messages)} messages saved\n")
safe_print(f"{len(messages)} messages saved\n")
time.sleep(0.25)
conn.close()
print("🎉 Done!")
safe_print("🎉 Done!")
if __name__ == "__main__":


@@ -0,0 +1,279 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Fetches messages from Medevio API.
Modes:
- Incremental (default): Only requests where messagesProcessed IS NULL or < updatedAt
- Full resync (--full): Fetches ALL messages for ALL pozadavky
"""
import zlib
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
import argparse
# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY_MESSAGES = r"""
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
id
createdAt
updatedAt
readAt
text
type
sender {
id
name
surname
clinicId
}
medicalRecord {
id
description
contentType
url
downloadUrl
token
createdAt
updatedAt
}
}
}
"""
# ==============================
# ⏱ DATETIME PARSER
# ==============================
def parse_dt(s):
if not s:
return None
try:
return datetime.fromisoformat(s.replace("Z", "+00:00"))
except:
pass
try:
return datetime.strptime(s[:19], "%Y-%m-%dT%H:%M:%S")
except:
return None
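# Worked example: parse_dt("2025-11-20T10:07:30Z") -> timezone-aware datetime in UTC;
# strings that datetime.fromisoformat() cannot handle fall through to the strptime branch,
# which keeps only the first 19 characters ("YYYY-MM-DDTHH:MM:SS") and returns a naive datetime.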
# ==============================
# 🔐 TOKEN
# ==============================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
return tok.replace("Bearer ", "")
# ==============================
# 📡 FETCH MESSAGES
# ==============================
def fetch_messages(headers, request_id):
payload = {
"operationName": "UseMessages_ListMessages",
"query": GRAPHQL_QUERY_MESSAGES,
"variables": {"requestId": request_id, "updatedSince": None},
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print("❌ HTTP", r.status_code, "for request", request_id)
return []
return r.json().get("data", {}).get("messages", []) or []
# ==============================
# 💾 SAVE MESSAGE
# ==============================
def insert_message(cur, req_id, msg):
sender = msg.get("sender") or {}
sender_name = " ".join(
x for x in [sender.get("name"), sender.get("surname")] if x
) or None
sql = """
INSERT INTO medevio_conversation (
id, request_id,
sender_name, sender_id, sender_clinic_id,
text, created_at, read_at, updated_at,
attachment_url, attachment_description, attachment_content_type
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
sender_name = VALUES(sender_name),
sender_id = VALUES(sender_id),
sender_clinic_id = VALUES(sender_clinic_id),
text = VALUES(text),
created_at = VALUES(created_at),
read_at = VALUES(read_at),
updated_at = VALUES(updated_at),
attachment_url = VALUES(attachment_url),
attachment_description = VALUES(attachment_description),
attachment_content_type = VALUES(attachment_content_type)
"""
mr = msg.get("medicalRecord") or {}
cur.execute(sql, (
msg.get("id"),
req_id,
sender_name,
sender.get("id"),
sender.get("clinicId"),
msg.get("text"),
parse_dt(msg.get("createdAt")),
parse_dt(msg.get("readAt")),
parse_dt(msg.get("updatedAt")),
mr.get("downloadUrl") or mr.get("url"),
mr.get("description"),
mr.get("contentType")
))
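# The ON DUPLICATE KEY UPDATE above makes this an upsert keyed on the message id
# (assuming id is the table's primary or unique key): re-running the sync refreshes
# sender, text, timestamps and attachment metadata instead of failing on a key collision.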
# ==============================
# 💾 DOWNLOAD MESSAGE ATTACHMENT
# ==============================
def insert_download(cur, req_id, msg, existing_ids):
mr = msg.get("medicalRecord") or {}
attachment_id = mr.get("id")
if not attachment_id:
return
if attachment_id in existing_ids:
return # skip duplicates
url = mr.get("downloadUrl") or mr.get("url")
if not url:
return
try:
r = requests.get(url, timeout=30)
r.raise_for_status()
data = r.content
except Exception as e:
print("⚠️ Failed to download:", e)
return
filename = url.split("/")[-1].split("?")[0]
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
filename, content_type, file_size, created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
file_content = VALUES(file_content),
file_size = VALUES(file_size),
downloaded_at = NOW()
""", (
req_id,
attachment_id,
"MESSAGE_ATTACHMENT",
filename,
mr.get("contentType"),
len(data),
parse_dt(msg.get("createdAt")),
data
))
existing_ids.add(attachment_id)
# ==============================
# 🧠 MAIN
# ==============================
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--full", action="store_true", help="Load messages for ALL pozadavky")
# Force full mode ON
args = parser.parse_args(args=["--full"])
# args = parser.parse_args()
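# Note: parse_args(args=["--full"]) ignores whatever was passed on the real command line,
# so this script currently always runs in full-refresh mode; restoring the commented
# parser.parse_args() call would make --full an opt-in flag again.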
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
# ---- Load existing attachments
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
print(f"📦 Already downloaded attachments: {len(existing_ids)}\n")
# ---- Select pozadavky to process
with conn.cursor() as cur:
if args.full:
print("🔁 FULL REFRESH MODE: Fetching messages for ALL pozadavky!\n")
cur.execute("SELECT id FROM pozadavky")
else:
print("📥 Incremental mode: Only syncing updated pozadavky.\n")
cur.execute("""
SELECT id FROM pozadavky
WHERE messagesProcessed IS NULL
OR messagesProcessed < updatedAt
""")
requests_to_process = cur.fetchall()
# =================================
# ⏩ SKIP FIRST 3100 AS YESTERDAY
# =================================
SKIP = 3100
if len(requests_to_process) > SKIP:
print(f"⏩ Skipping first {SKIP} pozadavky (already processed yesterday).")
requests_to_process = requests_to_process[SKIP:]
else:
print("⚠️ Not enough pozadavky to skip!")
print(f"📋 Requests to process: {len(requests_to_process)}\n")
# ---- Process each request
for idx, row in enumerate(requests_to_process, 1):
req_id = row["id"]
print(f"[{idx}/{len(requests_to_process)}] Processing {req_id}")
messages = fetch_messages(headers, req_id)
with conn.cursor() as cur:
for msg in messages:
insert_message(cur, req_id, msg)
insert_download(cur, req_id, msg, existing_ids)
conn.commit()
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
print(f"{len(messages)} messages saved\n")
time.sleep(0.25)
conn.close()
print("🎉 Done!")
if __name__ == "__main__":
main()


@@ -14,6 +14,36 @@ import pymysql
from pathlib import Path
from datetime import datetime
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc or not enc.lower().startswith("utf"):
# strip emoji + characters outside BMP
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# ASCII fallback
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# 🔧 CONFIGURATION
@@ -22,7 +52,7 @@ TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
@@ -67,6 +97,7 @@ def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
# ==============================
# 📡 FETCH ATTACHMENTS
# ==============================
@@ -78,42 +109,40 @@ def fetch_attachments(headers, request_id):
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print(f"❌ HTTP {r.status_code} for request {request_id}")
safe_print(f"❌ HTTP {r.status_code} for request {request_id}")
return []
return r.json().get("data", {}).get("patientRequestMedicalRecords", [])
# ==============================
# 💾 SAVE TO MYSQL (clean version)
# 💾 SAVE TO MYSQL
# ==============================
def insert_download(cur, req_id, a, m, created_date, existing_ids):
attachment_id = a.get("id")
if attachment_id in existing_ids:
print(f" ⏭️ Already downloaded {attachment_id}")
safe_print(f" ⏭️ Already downloaded {attachment_id}")
return False
url = m.get("downloadUrl")
if not url:
print(" ⚠️ Missing download URL")
safe_print(" ⚠️ Missing download URL")
return False
filename = extract_filename_from_url(url)
# Download file
try:
r = requests.get(url, timeout=30)
r.raise_for_status()
content = r.content
except Exception as e:
print(f" ⚠️ Download failed {url}: {e}")
safe_print(f" ⚠️ Download failed {url}: {e}")
return False
file_size = len(content)
attachment_type = a.get("attachmentType")
content_type = m.get("contentType")
# 🚨 CLEAN INSERT — no patient_jmeno/no patient_prijmeni
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
@@ -136,7 +165,7 @@ def insert_download(cur, req_id, a, m, created_date, existing_ids):
))
existing_ids.add(attachment_id)
print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
safe_print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
return True
@@ -152,11 +181,12 @@ def main():
conn = pymysql.connect(**DB_CONFIG)
# Load existing IDs
# Load existing attachments
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
print(f"{len(existing_ids)} attachments already saved.")
safe_print(f"{len(existing_ids)} attachments already saved.")
# Build query for pozadavky
sql = """
@@ -173,7 +203,7 @@ def main():
cur.execute(sql, params)
req_rows = cur.fetchall()
print(f"📋 Found {len(req_rows)} pozadavky to process.")
safe_print(f"📋 Found {len(req_rows)} pozadavky to process.")
# Process each pozadavek
for i, row in enumerate(req_rows, 1):
@@ -182,12 +212,12 @@ def main():
jmeno = row.get("pacient_jmeno") or ""
created_date = row.get("createdAt") or datetime.now()
print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
safe_print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
attachments = fetch_attachments(headers, req_id)
if not attachments:
print(" ⚠️ No attachments found")
safe_print(" ⚠️ No attachments found")
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
@@ -199,17 +229,16 @@ def main():
insert_download(cur, req_id, a, m, created_date, existing_ids)
conn.commit()
# Mark processed
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
print(f" ✅ Done ({len(attachments)} attachments)")
safe_print(f" ✅ Done ({len(attachments)} attachments)")
time.sleep(0.3)
conn.close()
print("\n🎯 All attachments processed.")
safe_print("\n🎯 All attachments processed.")
# ==============================
if __name__ == "__main__":


@@ -8,12 +8,40 @@ import re
from pathlib import Path
from datetime import datetime
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str = ""):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# Strip emoji and characters outside BMP for Task Scheduler
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# ASCII fallback
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "127.0.0.1",
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
@@ -31,24 +59,15 @@ def sanitize_name(name: str) -> str:
def make_abbrev(title: str) -> str:
"""
Create abbreviation from displayTitle:
- First letter of each word
- Keep digits together
- Uppercase
"""
if not title:
return ""
words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
abbr = ""
for w in words:
if w.isdigit():
abbr += w
else:
abbr += w[0]
return abbr.upper()
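# Illustrative example (hypothetical title): make_abbrev("Žádost o recept 2") -> "ŽOR2"
# (first letter of each word, digits kept whole, result upper-cased).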
@@ -56,28 +75,20 @@ def make_abbrev(title: str) -> str:
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
"""
Remove unexpected files.
RULE:
- Files starting with `▲` are ALWAYS kept.
"""
if not folder.exists():
return
for f in folder.iterdir():
if f.is_file():
# processed files (▲filename.pdf) are never deleted
if f.name.startswith("▲"):
continue
sanitized = sanitize_name(f.name)
if sanitized not in valid_files:
print(f"🗑️ Removing unexpected file: {f.name}")
safe_print(f"🗑️ Removing unexpected file: {f.name}")
try:
f.unlink()
except Exception as e:
print(f"⚠️ Could not delete {f}: {e}")
safe_print(f"⚠️ Could not delete {f}: {e}")
# ==============================
@@ -88,7 +99,7 @@ conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()
print("🔍 Loading metadata from DB (FAST)…")
safe_print("🔍 Loading metadata from DB (FAST)…")
cur_meta.execute("""
SELECT d.id AS download_id,
@@ -105,13 +116,25 @@ cur_meta.execute("""
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} attachment records.\n")
safe_print(f"📋 Found {len(rows)} attachment records.\n")
# ==============================
# 🧠 MAIN LOOP
# 🧠 MAIN LOOP WITH PROGRESS
# ==============================
unique_request_ids = []
seen = set()
for r in rows:
req_id = r["request_id"]
if req_id not in seen:
unique_request_ids.append(req_id)
seen.add(req_id)
total_requests = len(unique_request_ids)
safe_print(f"🔄 Processing {total_requests} unique requests...\n")
processed_requests = set()
current_index = 0
for r in rows:
req_id = r["request_id"]
@@ -120,14 +143,19 @@ for r in rows:
continue
processed_requests.add(req_id)
# ========== FETCH ALL VALID FILES FOR THIS REQUEST ==========
current_index += 1
percent = (current_index / total_requests) * 100
safe_print(f"\n[ {percent:5.1f}% ] Processing request {current_index} / {total_requests}{req_id}")
# ========== FETCH VALID FILENAMES ==========
cur_meta.execute(
"SELECT filename FROM medevio_downloads WHERE request_id=%s",
(req_id,)
)
valid_files = {sanitize_name(row["filename"]) for row in cur_meta.fetchall()}
# ========== FOLDER NAME BASED ON UPDATEDAT ==========
# ========== BUILD FOLDER NAME ==========
updated_at = r["req_updated_at"] or datetime.now()
date_str = updated_at.strftime("%Y-%m-%d")
@@ -140,17 +168,14 @@ for r in rows:
f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}"
)
# ========== DETECT EXISTING FOLDER (WITH OR WITHOUT ▲) ==========
# ========== DETECT EXISTING FOLDER ==========
existing_folder = None
folder_has_flag = False
for f in BASE_DIR.iterdir():
if f.is_dir() and req_id in f.name:
existing_folder = f
folder_has_flag = ("▲" in f.name)
break
# if the folder exists → we work inside it
main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
# ========== MERGE DUPLICATES ==========
@@ -160,14 +185,13 @@ for r in rows:
]
for dup in possible_dups:
print(f"♻️ Merging duplicate folder: {dup.name}")
safe_print(f"♻️ Merging duplicate folder: {dup.name}")
clean_folder(dup, valid_files)
main_folder.mkdir(parents=True, exist_ok=True)
for f in dup.iterdir():
if f.is_file():
# just move it; any ▲ stays in the file name
target = main_folder / f.name
if not target.exists():
f.rename(target)
@@ -185,11 +209,9 @@ for r in rows:
dest_plain = main_folder / filename
dest_marked = main_folder / ("▲" + filename)
# the file may already exist (either as filename or ▲filename)
if dest_plain.exists() or dest_marked.exists():
continue
# a new file was downloaded → the folder's ▲ flag should be removed
added_new_file = True
cur_blob.execute(
@@ -208,31 +230,22 @@ for r in rows:
with open(dest_plain, "wb") as f:
f.write(content)
print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
safe_print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
# ==============================
# 🔵 REMOVE FOLDER-LEVEL ▲ ONLY IF NEW FILE ADDED
# ==============================
if added_new_file:
# the folder should be renamed without the ▲
if "▲" in main_folder.name:
new_name = main_folder.name.replace("▲", "")
new_name = new_name.strip() # just to be safe
# ========== REMOVE ▲ FLAG IF NEW FILES ADDED ==========
if added_new_file and "▲" in main_folder.name:
new_name = main_folder.name.replace("▲", "").strip()
new_path = main_folder.parent / new_name
if new_path != main_folder:
try:
main_folder.rename(new_path)
print(f"🔄 Folder flag ▲ removed → {new_name}")
safe_print(f"🔄 Folder flag ▲ removed → {new_name}")
main_folder = new_path
except Exception as e:
print(f"⚠️ Could not rename folder: {e}")
else:
# no new files → NEVER touch the folder name
pass
safe_print(f"⚠️ Could not rename folder: {e}")
print("\n🎯 Export complete.\n")
safe_print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()


@@ -1,6 +1,17 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import importlib.util
import sys
from pathlib import Path
# Load FunctionsLoader
FUNCTIONS_LOADER_PATH = Path(r"C:\Reporting\Functions\FunctionsLoader.py")
spec = importlib.util.spec_from_file_location("FunctionsLoader", FUNCTIONS_LOADER_PATH)
FunctionsLoader = importlib.util.module_from_spec(spec)
sys.modules["FunctionsLoader"] = FunctionsLoader
spec.loader.exec_module(FunctionsLoader)
"""
Runs all PRAVIDELNÉ (scheduled) scripts in the given order:
@@ -12,6 +23,14 @@ Spustí všechny PRAVIDELNÉ skripty v daném pořadí:
5) PRAVIDELNE_5_SaveToFileSystem incremental.py
"""
import time, socket
for _ in range(30):
try:
socket.create_connection(("192.168.1.76", 3307), timeout=3).close()
break
except OSError:
time.sleep(10)
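# The loop above waits up to roughly five to six minutes (30 attempts, 10 s pause between them)
# for the MySQL server at 192.168.1.76:3307 to accept TCP connections before anything else runs.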
import sys
import subprocess
from pathlib import Path

10ReadPozadavky/test.py Normal file

@@ -0,0 +1,136 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Orchestrator for all PRAVIDELNE scripts in exact order.
"""
import time, socket
for _ in range(30):
try:
socket.create_connection(("192.168.1.76", 3307), timeout=3).close()
break
except OSError:
time.sleep(10)
import sys
import subprocess
from pathlib import Path
from datetime import datetime
# =====================================================================
# Import EXACT Functions.py from: C:\Reporting\Fio\Functions.py
# This bypasses all other Functions.py files in the system.
# =====================================================================
import importlib.util
FUNCTIONS_FILE = Path(r"C:\Reporting\Fio\Functions.py")
spec = importlib.util.spec_from_file_location("Functions_FIO", FUNCTIONS_FILE)
Functions_FIO = importlib.util.module_from_spec(spec)
sys.modules["Functions_FIO"] = Functions_FIO
spec.loader.exec_module(Functions_FIO)
# correct WhatsApp function
SendWhatsAppMessage = Functions_FIO.SendWhatsAppMessage
# =====================================================================
# General Orchestrator Settings
# =====================================================================
# folder where orchestrator + sub-scripts live
BASE_DIR = Path(__file__).resolve().parent
SCRIPTS_IN_ORDER = [
"PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py",
"PRAVIDELNE_1_ReadLast300DonePozadavku.py",
"PRAVIDELNE_2_ReadPoznamky.py",
"PRAVIDELNE_3_StahniKomunikaci.py",
"PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py",
"PRAVIDELNE_5_SaveToFileSystem incremental.py",
]
LOG_FILE = BASE_DIR / "PRAVIDELNE_log.txt"
# =====================================================================
# Logging + WhatsApp wrappers
# =====================================================================
def log(msg: str):
ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
line = f"[{ts}] {msg}"
print(line)
try:
with LOG_FILE.open("a", encoding="utf-8") as f:
f.write(line + "\n")
except:
pass
def whatsapp_notify(text: str):
"""WhatsApp message wrapper — never allowed to crash orchestrator"""
try:
SendWhatsAppMessage(text)
except:
pass
# =====================================================================
# Main orchestrator
# =====================================================================
def main():
log("=== START pravidelného běhu ===")
whatsapp_notify("🏁 *PRAVIDELNÉ skripty: START*")
for script_name in SCRIPTS_IN_ORDER:
script_path = BASE_DIR / script_name
if not script_path.exists():
err = f"❌ Skript nenalezen: {script_path}"
log(err)
whatsapp_notify(err)
continue
log(f"▶ Spouštím: {script_path.name}")
whatsapp_notify(f"▶ *Spouštím:* {script_path.name}")
try:
result = subprocess.run(
[sys.executable, str(script_path)],
cwd=str(BASE_DIR),
capture_output=True,
text=True,
encoding="utf-8",
errors="ignore",
)
except Exception as e:
err = f"💥 Chyba při spouštění {script_path.name}: {e}"
log(err)
whatsapp_notify(err)
continue
# return code
rc_msg = f"{script_path.name} return code: {result.returncode}"
log(rc_msg)
whatsapp_notify(rc_msg)
# stderr (warnings/errors)
if result.stderr:
err_msg = f"⚠ stderr v {script_path.name}:\n{result.stderr.strip()}"
log(err_msg)
whatsapp_notify(err_msg)
log("=== KONEC pravidelného běhu ===")
whatsapp_notify("✅ *PRAVIDELNÉ skripty: KONEC*\n")
# =====================================================================
# Entry point
# =====================================================================
if __name__ == "__main__":
main()


@@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
from pathlib import Path
import sys
# UTF-8 safety
try:
sys.stdout.reconfigure(encoding='utf-8')
except:
pass
# === CONFIG ===
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek který sledujeme
def read_token(path: Path) -> str:
t = path.read_text().strip()
if t.startswith("Bearer "):
return t.split(" ", 1)[1]
return t
# === QUERY ===
QUERY = r"""
query ClinicRequestNotes_Get($patientRequestId: String!) {
notes: getClinicPatientRequestNotes(requestId: $patientRequestId) {
id
content
createdAt
updatedAt
createdBy {
id
name
surname
}
}
}
"""
def run_query(request_id, token):
payload = {
"operationName": "ClinicRequestNotes_Get",
"query": QUERY,
"variables": {"patientRequestId": request_id},
}
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
r = requests.post(GRAPHQL_URL, json=payload, headers=headers)
r.raise_for_status()
return r.json()
def main():
token = read_token(TOKEN_PATH)
print(f"🔍 Čtu interní klinické poznámky k požadavku {REQUEST_ID} ...\n")
data = run_query(REQUEST_ID, token)
notes = data.get("data", {}).get("notes", [])
if not notes:
print("📭 Žádné klinické poznámky nejsou uložené.")
return
print(f"📌 Nalezeno {len(notes)} poznámek:\n")
for n in notes:
print("──────────────────────────────")
print(f"🆔 ID: {n['id']}")
print(f"👤 Vytvořil: {n['createdBy']['surname']} {n['createdBy']['name']}")
print(f"📅 createdAt: {n['createdAt']}")
print(f"🕒 updatedAt: {n['updatedAt']}")
print("📝 Obsah:")
print(n['content'])
print("")
if __name__ == "__main__":
main()


@@ -0,0 +1,121 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
from pathlib import Path
import sys
# UTF-8 handling
try:
sys.stdout.reconfigure(encoding='utf-8')
except:
pass
# === CONFIG ===
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
REQUEST_ID = "e17536c4-ed22-4242-ada5-d03713e0b7ac" # požadavek
NOTE_PREPEND_TEXT = "🔥 NOVÝ TESTOVACÍ ŘÁDEK\n" # text, který se přidá NA ZAČÁTEK
# === Helpers ===
def read_token(p: Path) -> str:
t = p.read_text().strip()
if t.startswith("Bearer "):
return t.split(" ", 1)[1]
return t
# === Queries ===
QUERY_GET_NOTES = r"""
query ClinicRequestNotes_Get($patientRequestId: String!) {
notes: getClinicPatientRequestNotes(requestId: $patientRequestId) {
id
content
createdAt
updatedAt
createdBy {
id
name
surname
}
}
}
"""
MUTATION_UPDATE_NOTE = r"""
mutation ClinicRequestNotes_Update($noteInput: UpdateClinicPatientRequestNoteInput!) {
updateClinicPatientRequestNote(noteInput: $noteInput) {
id
}
}
"""
# === Core functions ===
def gql(query, variables, token):
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
payload = {"query": query, "variables": variables}
r = requests.post(GRAPHQL_URL, json=payload, headers=headers)
r.raise_for_status()
return r.json()
def get_internal_note(request_id, token):
data = gql(QUERY_GET_NOTES, {"patientRequestId": request_id}, token)
notes = data.get("data", {}).get("notes", [])
return notes[0] if notes else None
def update_internal_note(note_id, new_content, token):
variables = {"noteInput": {"id": note_id, "content": new_content}}
return gql(MUTATION_UPDATE_NOTE, variables, token)
# === MAIN ===
def main():
token = read_token(TOKEN_PATH)
print(f"🔍 Načítám interní poznámku pro požadavek {REQUEST_ID}...\n")
note = get_internal_note(REQUEST_ID, token)
if not note:
print("❌ Nebyla nalezena žádná interní klinická poznámka!")
return
note_id = note["id"]
old_content = note["content"] or ""
print("📄 Původní obsah:")
print(old_content)
print("────────────────────────────\n")
# ===============================
# PREPEND new text
# ===============================
new_content = NOTE_PREPEND_TEXT + old_content
print("📝 Nový obsah který odešlu:")
print(new_content)
print("────────────────────────────\n")
# UPDATE
result = update_internal_note(note_id, new_content, token)
print(f"✅ Hotovo! Poznámka {note_id} aktualizována.")
print(result)
if __name__ == "__main__":
main()


@@ -0,0 +1,183 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
import mysql.connector
from pathlib import Path
import sys
# UTF-8 handling
try:
sys.stdout.reconfigure(encoding='utf-8')
except:
pass
# === CONFIGURATION ===
# --- Medevio API ---
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
# --- MySQL DB ---
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
}
# === Helpers ===
def read_token(p: Path) -> str:
"""Načte Bearer token z textového souboru."""
t = p.read_text().strip()
if t.startswith("Bearer "):
return t.split(" ", 1)[1]
return t
# === DB functions ===
def get_latest_open_request_id_from_db():
"""
Gets the ID, display title, first name, and surname of the newest open request from MySQL.
"""
print("🔍 Připojuji se k MySQL a hledám ID nejnovějšího otevřeného požadavku...")
try:
conn = mysql.connector.connect(**DB_CONFIG)
cursor = conn.cursor()
# SQL query: we now additionally select the patient's first and last name
query = """
SELECT id, displayTitle, pacient_jmeno, pacient_prijmeni
FROM pozadavky
WHERE doneAt IS NULL
ORDER BY updatedAt DESC
LIMIT 1;
"""
cursor.execute(query)
result = cursor.fetchone()
cursor.close()
conn.close()
if result:
request_id, display_title, jmeno, prijmeni = result
print(f"✅ Nalezen požadavek ID: {request_id} (Titul: {display_title})")
print(f" Pacient: **{prijmeni} {jmeno}**") # Vypíšeme pro snadnou kontrolu
return {
"id": request_id,
"displayTitle": display_title,
"jmeno": jmeno,
"prijmeni": prijmeni
}
print("❌ Nebyl nalezen žádný otevřený požadavek v DB.")
return None
except mysql.connector.Error as err:
print(f"❌ Chyba při připojení/dotazu MySQL: {err}")
return None
# === GraphQL operations ===
# These GraphQL queries are unchanged
QUERY_GET_NOTE = r"""
query ClinicRequestNotes_Get($patientRequestId: String!) {
notes: getClinicPatientRequestNotes(requestId: $patientRequestId) {
id
content
}
}
"""
MUTATION_UPDATE_NOTE = r"""
mutation ClinicRequestNotes_Update($noteInput: UpdateClinicPatientRequestNoteInput!) {
updateClinicPatientRequestNote(noteInput: $noteInput) {
id
}
}
"""
def gql(query, variables, token):
"""Obecná funkce pro volání GraphQL endpointu."""
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
payload = {"query": query, "variables": variables}
r = requests.post(GRAPHQL_URL, json=payload, headers=headers)
r.raise_for_status()
return r.json()
def get_internal_note(request_id, token):
"""Získá jedinou interní poznámku (obsah a ID) pro daný požadavek."""
print(f"🔍 Načítám poznámku z Medevia k požadavku {request_id}...")
data = gql(QUERY_GET_NOTE, {"patientRequestId": request_id}, token)
notes = data.get("data", {}).get("notes", [])
if notes:
print("✅ Interní poznámka nalezena.")
return notes[0]
print(f"⚠️ Interní poznámka pro požadavek {request_id} neexistuje.")
return None
def update_internal_note(note_id, new_content, token):
"""Aktualizuje obsah poznámky v Medeviu."""
variables = {"noteInput": {"id": note_id, "content": new_content}}
print(f"📝 Odesílám aktualizaci poznámky {note_id}...")
return gql(MUTATION_UPDATE_NOTE, variables, token)
# === MAIN ===
def main():
token = read_token(TOKEN_PATH)
# 1. Get the ID and patient name from the local DB
latest_request = get_latest_open_request_id_from_db()
if not latest_request:
return
request_id = latest_request["id"]
# 2. Fetch the existing internal note from Medevio
note = get_internal_note(request_id, token)
if not note:
return
note_id = note["id"]
old_content = note["content"] or ""
# 3. Build the new content (the request ID as the first line)
# The text we prepend
prepend_text = f"ID DB Synchronizace: {request_id}\n"
new_content = prepend_text + old_content
print("--- Nový obsah který odešlu (začátek) ---")
print(f"-> {prepend_text.strip()}")
print("------------------------------------------")
# 4. Update the note in Medevio
try:
update_internal_note(note_id, new_content, token)
print(f"\n✅ Úspěch! Poznámka {note_id} k požadavku {request_id} byla aktualizována v Medeviu.")
print(f" **Zkontroluj požadavek pacienta: {latest_request['prijmeni']} {latest_request['jmeno']}**")
except requests.exceptions.HTTPError as e:
print(f"\n❌ Chyba při aktualizaci Medevio API: {e}")
except Exception as e:
print(f"\n❌ Neočekávaná chyba: {e}")
if __name__ == "__main__":
main()


@@ -0,0 +1 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH


@@ -38,7 +38,7 @@ DB_CONFIG = {
"cursorclass": pymysql.cursors.DictCursor,
}
EXPORT_DIR = Path(r"u:\Dropbox\Ordinace\Reporty")
EXPORT_DIR = Path(r"z:\Dropbox\Ordinace\Reporty")
EXPORT_DIR.mkdir(exist_ok=True, parents=True)
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
xlsx_path = EXPORT_DIR / f"{timestamp} Agenda + Pozadavky (Merged).xlsx"


@@ -96,7 +96,18 @@ def fetch_messages(headers, request_id):
if r.status_code != 200:
print("❌ HTTP", r.status_code, "for request", request_id)
return []
return r.json().get("data", {}).get("messages", []) or []
try:
data = r.json()
except Exception as e:
print(f"❌ Failed to parse JSON for {request_id}: {e}")
print(" Response text:", r.text[:500])
return []
messages = data.get("data", {}).get("messages", []) or []
print(f" 🌐 API returned {len(messages)} messages for {request_id}")
return messages
# ==============================
@@ -218,18 +229,19 @@ def main():
print(f"📦 Already downloaded attachments: {len(existing_ids)}\n")
# ---- Select pozadavky needing message sync
# ---- Select 10 oldest pozadavky (regardless of messagesProcessed)
sql = """
SELECT id
FROM pozadavky
WHERE messagesProcessed IS NULL
OR messagesProcessed < updatedAt
ORDER BY updatedAt ASC
LIMIT 10
"""
with conn.cursor() as cur:
cur.execute(sql)
requests_to_process = cur.fetchall()
print(f"📋 Found {len(requests_to_process)} pozadavků requiring message sync.\n")
print(f"📋 Will process {len(requests_to_process)} oldest pozadavků.\n")
# ---- Process each pozadavek
for idx, row in enumerate(requests_to_process, 1):


@@ -1,122 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Test: Read conversation messages for one request_id and save full JSON.
Uses same logic as the production messages downloader.
"""
import json
import requests
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt") # same token file as your working script
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4" # 🧾 replace as needed
OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230") # where to save the JSON
GRAPHQL_QUERY = r"""
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
id
createdAt
updatedAt
readAt
text
type
sender {
id
name
surname
clinicId
}
medicalRecord {
id
description
contentType
url
downloadUrl
token
createdAt
updatedAt
}
}
}
"""
# ==============================
# 🔑 READ TOKEN
# ==============================
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
# ==============================
# 🚀 FETCH FROM API
# ==============================
def fetch_messages(headers, request_id):
variables = {"requestId": request_id, "updatedSince": None}
payload = {
"operationName": "UseMessages_ListMessages",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
print("HTTP status:", r.status_code)
if r.status_code != 200:
print("❌ Response preview:", r.text[:500])
return None
return r.json()
# ==============================
# 🧠 MAIN
# ==============================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
data = fetch_messages(headers, REQUEST_ID)
if not data:
print("⚠️ No data returned.")
return
# Save full JSON to file
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
output_path = OUTPUT_DIR / f"messages_{REQUEST_ID}.json"
output_path.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8")
print(f"✅ JSON saved to {output_path}")
# Optional: print summary
messages = data.get("data", {}).get("messages", [])
print(f"💬 {len(messages)} messages found:")
print("" * 100)
for msg in messages:
sender = msg.get("sender") or {}
sender_name = " ".join(x for x in [sender.get("name"), sender.get("surname")] if x).strip() or "(unknown)"
text = (msg.get("text") or "").strip().replace("\n", " ")
created = msg.get("createdAt", "")[:16].replace("T", " ")
print(f"[{created}] {sender_name}: {text}")
if msg.get("medicalRecord"):
mr = msg["medicalRecord"]
print(f" 📎 {mr.get('description') or '(no description)'} ({mr.get('contentType')})")
print(f" URL: {mr.get('downloadUrl') or mr.get('url')}")
print("" * 100)
# ==============================
if __name__ == "__main__":
main()


@@ -1,296 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Read conversation messages for pozadavky where messagesProcessed IS NULL
(Optionally filtered by createdAt), insert them into `medevio_conversation`,
and if a message has an attachment (medicalRecord), download it and save into
`medevio_downloads` (same logic as your attachments script).
Finally, mark pozadavky.messagesProcessed = NOW().
"""
import zlib
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ✅ Optional: Only process requests created after this date ("" = no limit)
CREATED_AFTER = "2024-12-01"
GRAPHQL_QUERY_MESSAGES = r"""
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
id
createdAt
updatedAt
readAt
text
type
sender {
id
name
surname
clinicId
}
medicalRecord {
id
description
contentType
url
downloadUrl
token
createdAt
updatedAt
}
}
}
"""
# ==============================
# 🧮 HELPERS
# ==============================
def short_crc8(uuid_str: str) -> str:
return f"{zlib.crc32(uuid_str.encode('utf-8')) & 0xffffffff:08x}"
def extract_filename_from_url(url: str) -> str:
try:
return url.split("/")[-1].split("?")[0]
except Exception:
return "unknown_filename"
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
def parse_dt(s):
if not s:
return None
# handle both "YYYY-mm-ddTHH:MM:SS" and "YYYY-mm-dd HH:MM:SS"
s = s.replace("T", " ")
fmts = ("%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M")
for f in fmts:
try:
return datetime.strptime(s[:19], f)
except Exception:
pass
return None
# ==============================
# 📡 FETCH MESSAGES
# ==============================
def fetch_messages(headers, request_id):
variables = {"requestId": request_id, "updatedSince": None}
payload = {
"operationName": "UseMessages_ListMessages",
"query": GRAPHQL_QUERY_MESSAGES,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print(f"❌ HTTP {r.status_code} for messages of request {request_id}")
return []
data = r.json().get("data", {}).get("messages", [])
return data or []
# ==============================
# 💾 SAVE: conversation row
# ==============================
def insert_message(cur, req_id, msg):
sender = msg.get("sender") or {}
sender_name = " ".join(x for x in [sender.get("name"), sender.get("surname")] if x).strip() or None
sender_id = sender.get("id")
sender_clinic_id = sender.get("clinicId")
text = msg.get("text")
created_at = parse_dt(msg.get("createdAt"))
read_at = parse_dt(msg.get("readAt"))
updated_at = parse_dt(msg.get("updatedAt"))
mr = msg.get("medicalRecord") or {}
attachment_url = mr.get("downloadUrl") or mr.get("url")
attachment_description = mr.get("description")
attachment_content_type = mr.get("contentType")
sql = """
INSERT INTO medevio_conversation (
id, request_id, sender_name, sender_id, sender_clinic_id,
text, created_at, read_at, updated_at,
attachment_url, attachment_description, attachment_content_type
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
sender_name = VALUES(sender_name),
sender_id = VALUES(sender_id),
sender_clinic_id = VALUES(sender_clinic_id),
text = VALUES(text),
created_at = VALUES(created_at),
read_at = VALUES(read_at),
updated_at = VALUES(updated_at),
attachment_url = VALUES(attachment_url),
attachment_description = VALUES(attachment_description),
attachment_content_type = VALUES(attachment_content_type)
"""
cur.execute(sql, (
msg.get("id"),
req_id,
sender_name,
sender_id,
sender_clinic_id,
text,
created_at,
read_at,
updated_at,
attachment_url,
attachment_description,
attachment_content_type
))
# ==============================
# 💾 SAVE: download attachment (from message)
# ==============================
def insert_download_from_message(cur, req_id, msg, existing_ids):
mr = msg.get("medicalRecord") or {}
attachment_id = mr.get("id")
if not attachment_id:
return False
if attachment_id in existing_ids:
print(f" ⏭️ Skipping already downloaded message-attachment {attachment_id}")
return False
url = mr.get("downloadUrl") or mr.get("url")
if not url:
return False
try:
r = requests.get(url, timeout=30)
r.raise_for_status()
content = r.content
except Exception as e:
print(f" ⚠️ Failed to download message attachment {attachment_id}: {e}")
return False
filename = extract_filename_from_url(url)
content_type = mr.get("contentType")
file_size = len(content)
created_date = parse_dt(msg.get("createdAt"))
# We don't have patient names on the message level here; keep NULLs.
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type, filename,
content_type, file_size, pacient_jmeno, pacient_prijmeni,
created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
file_content = VALUES(file_content),
file_size = VALUES(file_size),
downloaded_at = NOW()
""", (
req_id,
attachment_id,
"MESSAGE_ATTACHMENT",
filename,
content_type,
file_size,
None,
None,
created_date,
content
))
existing_ids.add(attachment_id)
print(f" 💾 Saved msg attachment {filename} ({file_size/1024:.1f} kB)")
return True
# ==============================
# 🧠 MAIN
# ==============================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
# Load existing download IDs to skip duplicates (same logic as your script)
print("📦 Loading list of already downloaded attachments...")
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
print(f"✅ Found {len(existing_ids)} attachments already saved.")
# Pull pozadavky where messagesProcessed IS NULL (optionally by createdAt)
sql = """
SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
FROM pozadavky
WHERE messagesProcessed IS NULL
"""
params = []
if CREATED_AFTER:
sql += " AND createdAt >= %s"
params.append(CREATED_AFTER)
with conn.cursor() as cur:
cur.execute(sql, params)
rows = cur.fetchall()
print(f"📋 Found {len(rows)} pozadavky to process (messagesProcessed IS NULL"
+ (f", created >= {CREATED_AFTER}" if CREATED_AFTER else "") + ")")
for i, row in enumerate(rows, 1):
req_id = row["id"]
prijmeni = row.get("pacient_prijmeni") or "Neznamy"
jmeno = row.get("pacient_jmeno") or ""
print(f"\n[{i}/{len(rows)}] 💬 {prijmeni}, {jmeno} ({req_id})")
messages = fetch_messages(headers, req_id)
if not messages:
print(" ⚠️ No messages found")
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
continue
inserted = 0
with conn.cursor() as cur:
for msg in messages:
insert_message(cur, req_id, msg)
# also pull any message attachments into downloads table
insert_download_from_message(cur, req_id, msg, existing_ids)
inserted += 1
conn.commit()
# mark processed
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
print(f"{inserted} messages processed for {prijmeni}, {jmeno}")
time.sleep(0.3) # polite API delay
conn.close()
print("\n✅ Done! All new conversations processed and pozadavky updated.")
# ==============================
if __name__ == "__main__":
main()


@@ -1,105 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import zlib
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP1")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
"""Replace invalid filename characters with underscore."""
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
# ==============================
# 📦 STREAMING EXPORT WITH TRIANGLE CHECK
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()
cur_meta.execute("""
SELECT id, request_id, attachment_id, filename, pacient_jmeno,
pacient_prijmeni, created_at, downloaded_at
FROM medevio_downloads
WHERE file_content IS NOT NULL;
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} records to check/export")
skipped, exported = 0, 0
for r in rows:
try:
created = r["created_at"] or r["downloaded_at"] or datetime.now()
date_str = created.strftime("%Y-%m-%d")
prijmeni = sanitize_name(r["pacient_prijmeni"] or "Unknown")
jmeno = sanitize_name(r["pacient_jmeno"] or "")
# 🔥 NEW: use full request_id instead of CRC32
full_req_id = sanitize_name(r["request_id"])
# Base (non-triangle) and processed (triangle) folder variants
base_folder = f"{date_str} {prijmeni}, {jmeno} {full_req_id}"
tri_folder = f"{date_str}{prijmeni}, {jmeno} {full_req_id}"
base_folder = sanitize_name(base_folder)
tri_folder = sanitize_name(tri_folder)
base_path = BASE_DIR / base_folder
tri_path = BASE_DIR / tri_folder
filename = sanitize_name(r["filename"] or f"unknown_{r['id']}.bin")
file_path_base = base_path / filename
file_path_tri = tri_path / filename
# 🟡 Skip if exists in either version
if file_path_base.exists() or file_path_tri.exists():
skipped += 1
found_in = "" if file_path_tri.exists() else ""
print(f"⏭️ Skipping existing{found_in}: {filename}")
continue
# Make sure base folder exists before saving
base_path.mkdir(parents=True, exist_ok=True)
# 2️⃣ Fetch blob
cur_blob.execute("SELECT file_content FROM medevio_downloads WHERE id = %s", (r["id"],))
blob = cur_blob.fetchone()[0]
if blob:
with open(file_path_base, "wb") as f:
f.write(blob)
exported += 1
print(f"✅ Saved: {file_path_base.relative_to(BASE_DIR)}")
else:
print(f"⚠️ No content for id={r['id']}")
except Exception as e:
print(f"❌ Error for id={r['id']}: {e}")
cur_blob.close()
cur_meta.close()
conn.close()
print(f"\n🎯 Export complete — {exported} new files saved, {skipped} skipped.\n")

View File

@@ -1,96 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
from pathlib import Path
# ============================
# CONFIG
# ============================
TOKEN_PATH = Path("token.txt")
# insert any existing request ID here
REQUEST_ID = "3fc9b28c-ada2-4d21-ab2d-fe60ad29fd8f"
GRAPHQL_NOTES_QUERY = r"""
query ClinicRequestNotes_Get($patientRequestId: String!) {
notes: getClinicPatientRequestNotes(requestId: $patientRequestId) {
id
content
createdAt
updatedAt
createdBy {
id
name
surname
}
}
}
"""
# ============================
# TOKEN
# ============================
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
# ============================
# FETCH
# ============================
def fetch_notes(request_id, token):
url = "https://api.medevio.cz/graphql"
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
variables = {"patientRequestId": request_id}
payload = {
"operationName": "ClinicRequestNotes_Get",
"query": GRAPHQL_NOTES_QUERY,
"variables": variables,
}
r = requests.post(url, json=payload, headers=headers)
r.raise_for_status()
return r.json()
# ============================
# MAIN
# ============================
def main():
token = read_token(TOKEN_PATH)
print(f"\n🔍 Fetching NOTES for request:\n ID = {REQUEST_ID}\n")
data = fetch_notes(REQUEST_ID, token)
print("📄 FULL RAW JSON:\n")
print(json.dumps(data, indent=2, ensure_ascii=False))
print("\n📝 Parsed notes:\n")
notes = data.get("data", {}).get("notes") or []
if not notes:
print(" (no notes found)")
return
for n in notes:
author = n.get("createdBy")
print(f"--- Note {n.get('id')} ---")
print(f"Created: {n.get('createdAt')}")
print(f"Updated: {n.get('updatedAt')}")
if author:
print(f"Author: {author.get('name')} {author.get('surname')}")
print("Content:")
print(n.get("content"))
print()
if __name__ == "__main__":
main()

View File

@@ -1,216 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Download all attachments for pozadavky where attachmentsProcessed IS NULL
and (optionally) createdAt is newer than a cutoff date.
Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed.
"""
import zlib
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
CREATED_AFTER = "2024-12-01" # optional filter
GRAPHQL_QUERY = r"""
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
patientRequestId: $requestId
pageInfo: {first: 100, offset: 0}
) {
attachmentType
id
medicalRecord {
contentType
description
downloadUrl
id
url
visibleToPatient
}
}
}
"""
# ==============================
# 🧮 HELPERS
# ==============================
def extract_filename_from_url(url: str) -> str:
try:
return url.split("/")[-1].split("?")[0]
except Exception:
return "unknown_filename"
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
# ==============================
# 📡 FETCH ATTACHMENTS
# ==============================
def fetch_attachments(headers, request_id):
payload = {
"operationName": "ClinicRequestDetail_GetPatientRequest2",
"query": GRAPHQL_QUERY,
"variables": {"requestId": request_id},
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print(f"❌ HTTP {r.status_code} for request {request_id}")
return []
return r.json().get("data", {}).get("patientRequestMedicalRecords", [])
# ==============================
# 💾 SAVE TO MYSQL (clean version)
# ==============================
def insert_download(cur, req_id, a, m, created_date, existing_ids):
attachment_id = a.get("id")
if attachment_id in existing_ids:
print(f" ⏭️ Already downloaded {attachment_id}")
return False
url = m.get("downloadUrl")
if not url:
print(" ⚠️ Missing download URL")
return False
filename = extract_filename_from_url(url)
# Download file
try:
r = requests.get(url, timeout=30)
r.raise_for_status()
content = r.content
except Exception as e:
print(f" ⚠️ Download failed {url}: {e}")
return False
file_size = len(content)
attachment_type = a.get("attachmentType")
content_type = m.get("contentType")
# 🚨 Clean insert: the pacient_jmeno / pacient_prijmeni columns are intentionally omitted
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
filename, content_type, file_size,
created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
file_content = VALUES(file_content),
file_size = VALUES(file_size),
downloaded_at = NOW()
""", (
req_id,
attachment_id,
attachment_type,
filename,
content_type,
file_size,
created_date,
content,
))
existing_ids.add(attachment_id)
print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
return True
# ==============================
# 🧠 MAIN
# ==============================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
# Load existing IDs
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
print(f"{len(existing_ids)} attachments already saved.")
# Build query for pozadavky
sql = """
SELECT id, pacient_prijmeni, pacient_jmeno, createdAt
FROM pozadavky
WHERE attachmentsProcessed IS NULL
"""
params = []
if CREATED_AFTER:
sql += " AND createdAt >= %s"
params.append(CREATED_AFTER)
with conn.cursor() as cur:
cur.execute(sql, params)
req_rows = cur.fetchall()
print(f"📋 Found {len(req_rows)} pozadavky to process.")
# Process each pozadavek
for i, row in enumerate(req_rows, 1):
req_id = row["id"]
prijmeni = row.get("pacient_prijmeni") or "Neznamy"
jmeno = row.get("pacient_jmeno") or ""
created_date = row.get("createdAt") or datetime.now()
print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
attachments = fetch_attachments(headers, req_id)
if not attachments:
print(" ⚠️ No attachments found")
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
continue
with conn.cursor() as cur:
for a in attachments:
m = a.get("medicalRecord") or {}
insert_download(cur, req_id, a, m, created_date, existing_ids)
conn.commit()
# Mark processed
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
print(f" ✅ Done ({len(attachments)} attachments)")
time.sleep(0.3)
conn.close()
print("\n🎯 All attachments processed.")
# ==============================
if __name__ == "__main__":
main()

View File

@@ -1,112 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
from pathlib import Path
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql" # ← správná URL
# 👉 nastav offset zde
OFFSET = 0 # 0 = první pacient, 1 = druhý, 100 = 101. pacient
FIRST = 1 # načteme jen jednoho
QUERY = """
query PatientGridImpl_ListClinicPatients(
$clinicSlug: String!,
$filter: ListPatientFilter!,
$pageInfo: PageInfo!,
$sort: [ListPatientsSort!]
) {
patientsList: listPatients(
clinicSlug: $clinicSlug
filter: $filter
pageInfo: $pageInfo
sort: $sort
) {
count
patients {
id
identificationNumber
insuranceCompanyObject {
id
shortName
}
lastReservation
locale
name
nextReservation
key
phone
sex
status2
surname
type
kind
isInClinic
isUnknownPatient
user {
id
name
surname
phone
registrationCompletedTime
}
owner {
name
surname
}
clinics {
id
name
slug
}
tags(onlyImportant: false) {
id
name
color
icon
}
}
}
}
"""
variables = {
"clinicSlug": CLINIC_SLUG,
"filter": {},
"pageInfo": {
"first": FIRST,
"offset": OFFSET
},
"sort": [
{
"field": "ReverseFullName",
"sort": "ASC"
}
]
}
token = Path("token.txt").read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
print("⏳ Fetching patient...")
response = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers
)
response.raise_for_status()
data = response.json()
print("\n📌 RAW JSON RESPONSE:\n")
print(json.dumps(data, indent=2, ensure_ascii=False))

View File

@@ -1,203 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
import pymysql
from pathlib import Path
# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql"
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# number of patients per page
PAGE_SIZE = 100
# ==============================
# 📌 GRAPHQL QUERY
# ==============================
QUERY = """
query PatientGridImpl_ListClinicPatients(
$clinicSlug: String!,
$filter: ListPatientFilter!,
$pageInfo: PageInfo!,
$sort: [ListPatientsSort!]
) {
patientsList: listPatients(
clinicSlug: $clinicSlug
filter: $filter
pageInfo: $pageInfo
sort: $sort
) {
count
patients {
id
identificationNumber
insuranceCompanyObject {
id
shortName
}
name
surname
phone
sex
status2
type
kind
isInClinic
isUnknownPatient
user {
registrationCompletedTime
}
clinics {
id
name
slug
}
tags(onlyImportant: false) {
id
name
color
icon
}
}
}
}
"""
from datetime import datetime
def normalize_dt(dt_str):
if not dt_str:
return None
# remove Z
dt_str = dt_str.replace("Z", "")
# replace T with space
dt_str = dt_str.replace("T", " ")
# remove fractional seconds
if "." in dt_str:
dt_str = dt_str.split(".")[0]
return dt_str # MySQL can accept "YYYY-MM-DD HH:MM:SS"
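# Example (value made up): normalize_dt("2025-11-17T10:28:31.123Z")
# returns "2025-11-17 10:28:31". Only the Z/T markers and fractional seconds are
# stripped; no timezone conversion happens, so the value stays in UTC.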
# ==============================
# 💾 SAVE PATIENT
# ==============================
def save_patient_summary(cur, p):
sql = """
REPLACE INTO medevio_pacienti (
id, jmeno, prijmeni, rodne_cislo, telefon, pohlavi,
pojistovna_id, pojistovna_nazev,
status, typ, kind,
is_in_clinic, is_unknown,
registration_time,
tags_json, clinics_json,
last_update
)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,NOW())
"""
ins = p.get("insuranceCompanyObject") or {}
user = p.get("user") or {}
cur.execute(sql, (
p.get("id"),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
p.get("phone"),
p.get("sex"),
ins.get("id"),
ins.get("shortName"),
p.get("status2"),
p.get("type"),
p.get("kind"),
1 if p.get("isInClinic") else 0,
1 if p.get("isUnknownPatient") else 0,
normalize_dt(user.get("registrationCompletedTime")),
json.dumps(p.get("tags"), ensure_ascii=False),
json.dumps(p.get("clinics"), ensure_ascii=False),
))
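# Editor's note (sketch, assuming `id` is the table's primary key): REPLACE INTO
# deletes the existing row and inserts a new one, so columns not listed here
# (for example the detail fields filled by the separate detail-sync script) are
# reset on every run. An upsert that would preserve them could look like:
#
#   INSERT INTO medevio_pacienti (id, jmeno, prijmeni, ...)
#   VALUES (%s, %s, %s, ...)
#   ON DUPLICATE KEY UPDATE jmeno = VALUES(jmeno), prijmeni = VALUES(prijmeni), ...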
# ==============================
# 🧠 MAIN
# ==============================
def main():
token = TOKEN_PATH.read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor()
offset = 0
total = None
print("⏳ Starting patient sync...\n")
while True:
print(f"➡️ Fetching patients {offset}{offset + PAGE_SIZE} ...")
variables = {
"clinicSlug": CLINIC_SLUG,
"filter": {},
"pageInfo": {"first": PAGE_SIZE, "offset": offset},
"sort": [{"field": "ReverseFullName", "sort": "ASC"}],
}
response = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers,
timeout=30
)
response.raise_for_status()
data = response.json()
block = data["data"]["patientsList"]
if total is None:
total = block["count"]
print(f"📌 Total patients: {total}\n")
patients = block["patients"]
if not patients:
print("✅ No more patients. Finished.")
break
# save each patient
for p in patients:
save_patient_summary(cur, p)
conn.commit()
print(f" ✓ Saved {len(patients)} patients.")
offset += PAGE_SIZE
if offset >= total:
print("\n✅ All patients downloaded.")
break
cur.close()
conn.close()
# ==============================
if __name__ == "__main__":
main()

View File

@@ -1,251 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
from pathlib import Path
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"
PATIENT_ID = "5a0a9ff0-bbe8-4fc7-a27d-b7b475ee2189"
QUERY = """
query ClinicPatientDetailModal_GetData(
$clinicSlug: String!,
$patientId: String!,
$patientUuid: UUID!,
$challengesStatus: ECRFChallengeStatus!,
$locale: Locale!
) {
clinic: getClinic(clinicSlug: $clinicSlug) {
id
features
sslSUKLCertificateId
type
ais
slug
...ClinicWithTypeAndFeatures_Clinic
...PatientInfo_Clinic
__typename
}
patient: getPatientForClinic(clinicSlug: $clinicSlug, patientId: $patientId) {
...ClinicPatientDetailModal_Patient
__typename
}
challenges: listPatientChallenges2(
clinicSlug: $clinicSlug
patientId: $patientId
status: $challengesStatus
) {
...ChallengeTableList_EcrfChallenge
__typename
}
patientRequestsResponse: filterPatientRequestsForClinic(
clinicSlug: $clinicSlug
filter: {patientId: $patientUuid}
pageInfo: {first: 1, offset: 0}
) {
count
items { id __typename }
__typename
}
treatmentPlanPatients: listTreatmentPlanPatients(
clinicSlug: $clinicSlug
patientId: $patientUuid
) {
...ClinicPlanPatientList_PlanPatient
__typename
}
premiumPlans: listClinicPremiumPlans(clinicSlug: $clinicSlug) {
id
__typename
}
mergeSuggestions: findMergeSuggestions(
clinicSlug: $clinicSlug
input: {existingPatientId: $patientUuid}
) {
...MergeSuggestionAlert_MergeSuggestionResult
__typename
}
insuranceCards: getPatientDocuments(
patientId: $patientUuid
type: InsuranceCard
) {
...PatientInfo_InsuranceCard
__typename
}
}
fragment ClinicWithTypeAndFeatures_Clinic on Clinic {
id
type
features
__typename
}
fragment PatientInfo_Clinic on Clinic {
country
id
slug
ais
...ClinicWithTypeAndFeatures_Clinic
__typename
}
fragment ClinicPatientDetailModal_Patient on ExtendedPatient {
id
isInClinic
kind
name
isUnknownPatient
sex
surname
identificationNumber
editableByDoctor
type
key
user { id name surname __typename }
...ClinicPatientDetail_Patient
...PatientInfo_AccountPatient
...ClinicPatientInfo_Patient
__typename
}
fragment ClinicPatientDetail_Patient on ExtendedPatient {
name
surname
email
id
identificationNumber
isInClinic
key
phone
sex
type
dob
user { id __typename }
isUnknownPatient
hasMobileApp
__typename
}
fragment PatientInfo_AccountPatient on ExtendedPatient {
id
createdAt
key
user {
registrationCompletedTime
deactivatedTime
__typename
}
__typename
}
fragment ClinicPatientInfo_Patient on ExtendedPatient {
anamnesisShared
anamnesisStatusForClinic { updatedAt __typename }
clinics { id name slug __typename }
id
isInClinic
dob
city
familyMembers: family { __typename }
houseNumber
identificationNumber
insuranceCompanyObject { id code name shortName __typename }
kind
name
note
owner { name surname __typename }
key
status
street
surname
user { id email name phone surname __typename }
userRelationship
premiumPlanPatient { id __typename }
sex
tags(onlyImportant: false) { id name color icon __typename }
type
isUnknownPatient
hasMobileApp
__typename
}
fragment ChallengeTableList_EcrfChallenge on ECRFChallenge {
id
createdAt
sentAt
issuedToPatient {
id
identificationNumber
name
surname
__typename
}
userECRF(locale: $locale) { id name __typename }
patientRequestId
status
__typename
}
fragment MergeSuggestionAlert_MergeSuggestionResult on MergeSuggestionResult {
extendedPatient { id __typename }
matchResult
__typename
}
fragment ClinicPlanPatientList_PlanPatient on TreatmentPlanPatient {
id
createdAt
listPatient { id identificationNumber name key status surname __typename }
treatmentPlan { id slug name __typename }
__typename
}
fragment PatientInfo_InsuranceCard on PatientDocument {
id
contentType
url
downloadUrl
__typename
}
"""
def main():
token = TOKEN_PATH.read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
variables = {
"clinicSlug": CLINIC_SLUG,
"patientId": PATIENT_ID,
"patientUuid": PATIENT_ID,
"challengesStatus": "SENT",
"locale": "cs",
}
print("⏳ Fetching patient detail…")
r = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers,
timeout=30
)
r.raise_for_status()
data = r.json()
print("\n📌 RAW DETAIL JSON:\n")
print(json.dumps(data, indent=2, ensure_ascii=False))
if __name__ == "__main__":
main()

View File

@@ -1,365 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
import pymysql
from pathlib import Path
# ==============================
# CONFIG
# ==============================
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
PATIENT_ID = "5a0a9ff0-bbe8-4fc7-a27d-b7b475ee2189"
# ==============================
# HELPERS
# ==============================
def normalize_dt(dt_str):
if not dt_str:
return None
dt_str = dt_str.replace("Z", "").replace("T", " ")
if "." in dt_str:
dt_str = dt_str.split(".")[0]
return dt_str
def save_patient_detail(cur, p):
user = p.get("user") or {}
ins = p.get("insuranceCompanyObject") or {}
tags = p.get("tags") or []
clinics = p.get("clinics") or []
sql = """
UPDATE medevio_pacienti SET
email = %s,
telefon = %s,
dob = %s,
street = %s,
house_number = %s,
city = %s,
user_id = %s,
user_email = %s,
user_name = %s,
user_surname = %s,
user_phone = %s,
user_reg_time = %s,
user_deactivated_time = %s,
created_at = %s,
note = %s,
has_mobile_app = %s,
user_relationship = %s,
pojistovna_code = %s,
tags_json = %s,
clinics_json = %s,
last_update = NOW()
WHERE id = %s
"""
cur.execute(sql, (
p.get("email"),
p.get("phone"),
p.get("dob"),
p.get("street"),
p.get("houseNumber"),
p.get("city"),
user.get("id"),
user.get("email"),
user.get("name"),
user.get("surname"),
user.get("phone"),
normalize_dt(user.get("registrationCompletedTime")),
normalize_dt(user.get("deactivatedTime")),
normalize_dt(p.get("createdAt")),
p.get("note"),
1 if p.get("hasMobileApp") else 0,
p.get("userRelationship"),
ins.get("code"),
json.dumps(tags, ensure_ascii=False),
json.dumps(clinics, ensure_ascii=False),
p.get("id")
))
# ==============================
# FULL EXACT WORKING GRAPHQL QUERY
# ==============================
QUERY = """
query ClinicPatientDetailModal_GetData(
$clinicSlug: String!,
$patientId: String!,
$patientUuid: UUID!,
$challengesStatus: ECRFChallengeStatus!,
$locale: Locale!
) {
clinic: getClinic(clinicSlug: $clinicSlug) {
id
features
sslSUKLCertificateId
type
ais
slug
...ClinicWithTypeAndFeatures_Clinic
...PatientInfo_Clinic
__typename
}
patient: getPatientForClinic(clinicSlug: $clinicSlug, patientId: $patientId) {
...ClinicPatientDetailModal_Patient
__typename
}
challenges: listPatientChallenges2(
clinicSlug: $clinicSlug
patientId: $patientId
status: $challengesStatus
) {
...ChallengeTableList_EcrfChallenge
__typename
}
patientRequestsResponse: filterPatientRequestsForClinic(
clinicSlug: $clinicSlug
filter: {patientId: $patientUuid}
pageInfo: {first: 1, offset: 0}
) {
count
items { id __typename }
__typename
}
treatmentPlanPatients: listTreatmentPlanPatients(
clinicSlug: $clinicSlug
patientId: $patientUuid
) {
...ClinicPlanPatientList_PlanPatient
__typename
}
premiumPlans: listClinicPremiumPlans(clinicSlug: $clinicSlug) {
id
__typename
}
mergeSuggestions: findMergeSuggestions(
clinicSlug: $clinicSlug
input: {existingPatientId: $patientUuid}
) {
...MergeSuggestionAlert_MergeSuggestionResult
__typename
}
insuranceCards: getPatientDocuments(
patientId: $patientUuid
type: InsuranceCard
) {
...PatientInfo_InsuranceCard
__typename
}
}
fragment ClinicWithTypeAndFeatures_Clinic on Clinic {
id
type
features
__typename
}
fragment PatientInfo_Clinic on Clinic {
country
id
slug
ais
...ClinicWithTypeAndFeatures_Clinic
__typename
}
fragment ClinicPatientDetailModal_Patient on ExtendedPatient {
id
isInClinic
kind
name
isUnknownPatient
sex
surname
identificationNumber
editableByDoctor
type
key
user { id name surname __typename }
...ClinicPatientDetail_Patient
...PatientInfo_AccountPatient
...ClinicPatientInfo_Patient
__typename
}
fragment ClinicPatientDetail_Patient on ExtendedPatient {
name
surname
email
id
identificationNumber
isInClinic
key
phone
sex
type
dob
user { id __typename }
isUnknownPatient
hasMobileApp
__typename
}
fragment PatientInfo_AccountPatient on ExtendedPatient {
id
createdAt
key
user {
registrationCompletedTime
deactivatedTime
__typename
}
__typename
}
fragment ClinicPatientInfo_Patient on ExtendedPatient {
anamnesisShared
anamnesisStatusForClinic { updatedAt __typename }
clinics { id name slug __typename }
id
isInClinic
dob
city
familyMembers: family { __typename }
houseNumber
identificationNumber
insuranceCompanyObject { id code name shortName __typename }
kind
name
note
owner { name surname __typename }
key
status
street
surname
user { id email name phone surname __typename }
userRelationship
premiumPlanPatient { id __typename }
sex
tags(onlyImportant: false) { id name color icon __typename }
type
isUnknownPatient
hasMobileApp
__typename
}
fragment ChallengeTableList_EcrfChallenge on ECRFChallenge {
id
createdAt
sentAt
issuedToPatient {
id
identificationNumber
name
surname
__typename
}
userECRF(locale: $locale) { id name __typename }
patientRequestId
status
__typename
}
fragment MergeSuggestionAlert_MergeSuggestionResult on MergeSuggestionResult {
extendedPatient { id __typename }
matchResult
__typename
}
fragment ClinicPlanPatientList_PlanPatient on TreatmentPlanPatient {
id
createdAt
listPatient { id identificationNumber name key status surname __typename }
treatmentPlan { id slug name __typename }
__typename
}
fragment PatientInfo_InsuranceCard on PatientDocument {
id
contentType
url
downloadUrl
__typename
}
"""
# ==============================
# MAIN
# ==============================
def main():
token = TOKEN_PATH.read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
variables = {
"clinicSlug": CLINIC_SLUG,
"patientId": PATIENT_ID,
"patientUuid": PATIENT_ID,
"challengesStatus": "SENT",
"locale": "cs",
}
print(f"⏳ Fetching patient detail {PATIENT_ID}")
r = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers,
timeout=30
)
r.raise_for_status()
data = r.json()
patient = data["data"]["patient"]
if not patient:
print("❌ Patient not found in API response!")
return
print("📥 Patient detail downloaded.")
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor()
save_patient_detail(cur, patient)
conn.commit()
print("✅ Patient detail saved to DB.")
cur.close()
conn.close()
if __name__ == "__main__":
main()

View File

@@ -1,183 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import time
import random
import requests
import pymysql
from pathlib import Path
from datetime import datetime
# Patients with details_updated_at older than this date will be refreshed
# UpdateOlderThan = datetime(2025, 2, 20) # example date
UpdateOlderThan = None  # set a cutoff datetime here when you want to refresh all patients
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
def normalize_dt(dt_str):
if not dt_str:
return None
dt_str = dt_str.replace("Z", "").replace("T", " ")
if "." in dt_str:
dt_str = dt_str.split(".")[0]
return dt_str
def save_patient_detail(cur, p):
user = p.get("user") or {}
ins = p.get("insuranceCompanyObject") or {}
tags = p.get("tags") or []
clinics = p.get("clinics") or []
sql = """
UPDATE medevio_pacienti SET
email = %s,
telefon = %s,
dob = %s,
street = %s,
house_number = %s,
city = %s,
user_id = %s,
user_email = %s,
user_name = %s,
user_surname = %s,
user_phone = %s,
user_reg_time = %s,
user_deactivated_time = %s,
created_at = %s,
note = %s,
has_mobile_app = %s,
user_relationship = %s,
pojistovna_code = %s,
tags_json = %s,
clinics_json = %s,
last_update = NOW(),
details_updated_at = NOW()
WHERE id = %s
"""
cur.execute(sql, (
p.get("email"),
p.get("phone"),
p.get("dob"),
p.get("street"),
p.get("houseNumber"),
p.get("city"),
user.get("id"),
user.get("email"),
user.get("name"),
user.get("surname"),
user.get("phone"),
normalize_dt(user.get("registrationCompletedTime")),
normalize_dt(user.get("deactivatedTime")),
normalize_dt(p.get("createdAt")),
p.get("note"),
1 if p.get("hasMobileApp") else 0,
p.get("userRelationship"),
ins.get("code"),
json.dumps(tags, ensure_ascii=False),
json.dumps(clinics, ensure_ascii=False),
p.get("id")
))
# The full GraphQL query is loaded from an external file (patient_detail.graphql)
QUERY = Path("patient_detail.graphql").read_text(encoding="utf-8")
def main():
token = TOKEN_PATH.read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor()
if UpdateOlderThan:
print(f"🔎 Updating patients with details_updated_at NULL or < {UpdateOlderThan}")
cur.execute("""
SELECT id, prijmeni, jmeno, details_updated_at
FROM medevio_pacienti
WHERE details_updated_at IS NULL OR details_updated_at < %s
ORDER BY prijmeni, jmeno
""", (UpdateOlderThan,))
else:
print("🔎 Updating only patients with details_updated_at NULL")
cur.execute("""
SELECT id, prijmeni, jmeno, details_updated_at
FROM medevio_pacienti
WHERE details_updated_at IS NULL
ORDER BY prijmeni, jmeno
""")
patients = cur.fetchall()
total = len(patients)
print(f"⏳ Starting full patient detail sync for {total} patients...")
for idx, row in enumerate(patients, start=1):
pid = row["id"]
name = f"{row.get('prijmeni','')}, {row.get('jmeno','')}"
print(f"[{idx}/{total}] Updating: {name} ({pid})")
variables = {
"clinicSlug": CLINIC_SLUG,
"patientId": pid,
"patientUuid": pid,
"challengesStatus": "SENT",
"locale": "cs",
}
try:
r = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers,
timeout=30
)
r.raise_for_status()
js = r.json()
p = js["data"]["patient"]
if p:
save_patient_detail(cur, p)
conn.commit()
print(" ✔ saved")
else:
print(" ⚠ no patient data returned")
except Exception as e:
print(f" ❌ ERROR: {e}")
time.sleep(2)
continue
time.sleep(random.uniform(0.5, 1.5))
conn.close()
print("✅ DONE full detail sync completed.")
if __name__ == "__main__":
main()

View File

@@ -1,197 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
import time
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$pageInfo: PageInfo!,
$locale: Locale!,
$state: PatientRequestState
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
pageInfo: $pageInfo,
state: $state
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
}
}
}
"""
# ================================
# 🔑 TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
# ================================
# 🕒 DATETIME FORMAT
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
return dt.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
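# Example (value made up): to_mysql_dt("2025-11-20T09:07:30Z") -> "2025-11-20 09:07:30".
# fromisoformat() keeps the timestamp as given (here UTC); unlike the dateutil-based
# variants elsewhere in this changeset, no conversion to local time is performed.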
# ================================
# 💾 UPSERT
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
to_mysql_dt(r.get("updatedAt")),
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# 📡 FETCH ACTIVE PAGE
# ================================
def fetch_active(headers, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
"state": "ACTIVE",
}
payload = {
"operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json().get("data", {}).get("requestsResponse", {})
return data.get("patientRequests", []), data.get("count", 0)
# ================================
# 🧠 MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Sync ACTIVE požadavků @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
# -------------------------------
# 🚀 FETCH ALL ACTIVE REQUESTS
# -------------------------------
offset = 0
total_processed = 0
total_count = None
while True:
batch, count = fetch_active(headers, offset)
if total_count is None:
total_count = count
print(f"📡 Celkem ACTIVE v Medevio: {count}")
if not batch:
break
for r in batch:
upsert(conn, r)
total_processed += len(batch)
print(f"{total_processed}/{total_count} ACTIVE processed")
if offset + BATCH_SIZE >= count:
break
offset += BATCH_SIZE
time.sleep(0.4)
conn.close()
print("\n✅ ACTIVE sync hotovo!\n")
if __name__ == "__main__":
main()

View File

@@ -1,87 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
from pathlib import Path
TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql"
CLINIC_SLUG = "mudr-buzalkova"
QUERY = r"""
query ClinicLegacyRequestList_ListPatientRequestsForClinic(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requests: listPatientRequestsForClinic(
clinicSlug: $clinicSlug
queueId: $queueId
queueAssignment: $queueAssignment
state: $state
pageInfo: $pageInfo
) {
id
displayTitle(locale: $locale)
createdAt
doneAt
removedAt
extendedPatient {
name
surname
}
}
}
"""
def main():
token = TOKEN_PATH.read_text().strip()
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"state": "ACTIVE",
"pageInfo": {"first": 200, "offset": 100},
"locale": "cs",
}
print("⏳ Testing ACTIVE request fetch (LEGACY API)…")
r = requests.post(
GRAPHQL_URL,
json={"query": QUERY, "variables": variables},
headers=headers,
timeout=30
)
r.raise_for_status()
js = r.json()
# extract list
requests_list = js.get("data", {}).get("requests", [])
print("\n📌 Number of ACTIVE requests returned:", len(requests_list))
print("\n📌 First 5 request IDs:")
for item in requests_list[:5]:
print("", item.get("id"))
# debug dump if needed
# print(json.dumps(js, indent=2, ensure_ascii=False))
print("\n✅ Test completed.\n")
if __name__ == "__main__":
main()

View File

@@ -1,168 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300 # download the latest 300 requests
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$pageInfo: PageInfo!,
$locale: Locale!,
$state: PatientRequestState
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
pageInfo: $pageInfo,
state: $state
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
}
}
}
"""
# ================================
# TOKEN
# ================================
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# DATE PARSER
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
return dt.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
# ================================
# UPSERT
# ================================
def upsert(conn, r):
p = (r.get("extendedPatient") or {})
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
to_mysql_dt(r.get("updatedAt")),
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# FETCH LATEST 300 REQUESTS
# ================================
def fetch_latest_requests(headers):
vars = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": LIMIT, "offset": 0},
"locale": "cs",
"state": "DONE" # ALL STATES (ACTIVE, DONE, REMOVED)
}
payload = {
"operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
"query": GRAPHQL_QUERY,
"variables": vars,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", [])
# ================================
# MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Downloading last {LIMIT} requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_latest_requests(headers)
print(f"📌 Requests returned: {len(requests_list)}")
for r in requests_list:
upsert(conn, r)
conn.close()
print("\n✅ Done. Latest requests synced.\n")
if __name__ == "__main__":
main()

View File

@@ -1,226 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import openpyxl
from openpyxl.styles import PatternFill, Font
from openpyxl.utils import get_column_letter
from datetime import datetime
timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
OUTPUT_PATH = fr"U:\Dropbox\!!!Days\Downloads Z230\{timestamp} medevio_patients_report.xlsx"
# ============================
# CONFIGURATION
# ============================
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ============================
# FUNCTIONS
# ============================
from openpyxl.styles import Border, Side
thin_border = Border(
left=Side(style='thin'),
right=Side(style='thin'),
top=Side(style='thin'),
bottom=Side(style='thin')
)
def apply_thin_borders(ws):
"""Apply thin borders to all cells in the worksheet."""
for row in ws.iter_rows():
for cell in row:
cell.border = thin_border
def autofit_columns(ws):
"""Auto-adjust column widths based on longest cell content."""
for col in ws.columns:
max_length = 0
col_letter = get_column_letter(col[0].column)
for cell in col:
try:
if cell.value:
max_length = max(max_length, len(str(cell.value)))
except Exception:
pass
ws.column_dimensions[col_letter].width = max_length + 2
def apply_header_style(ws):
"""Make header BRIGHT YELLOW and bold."""
fill = PatternFill(start_color="FFFF00", end_color="FFFF00", fill_type="solid")
font = Font(bold=True)
for cell in ws[1]:
cell.fill = fill
cell.font = font
def create_compact_row(row):
"""Produce compact record with merged pojistovna, with user_relationship after prijmeni."""
# insurance merged
code = row.get("pojistovna_code") or ""
naz = row.get("pojistovna_nazev") or ""
if code and naz:
poj = f"{code} ({naz})"
elif code:
poj = code
elif naz:
poj = naz
else:
poj = ""
return {
"id": row["id"],
"jmeno": row["jmeno"],
"prijmeni": row["prijmeni"],
# 🔹 inserted here
"user_relationship": row.get("user_relationship"),
"rodne_cislo": row["rodne_cislo"],
"dob": row["dob"],
"telefon": row["telefon"],
"email": row["email"],
"pojistovna": poj,
"status": row["status"],
"has_mobile_app": row["has_mobile_app"],
"registration_time": row["registration_time"],
"last_update": row["last_update"],
}
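# Example (values made up): with pojistovna_code = "111" and pojistovna_nazev = "VZP",
# the merged "pojistovna" column becomes "111 (VZP)"; if only one of the two values
# is present, that value is used on its own, otherwise the cell stays empty.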
def create_pozadavky_rows(rows):
"""Convert raw pozadavky SQL rows into rows for the Excel sheet."""
output = []
for r in rows:
output.append({
# 🔹 First the ID
"id": r["id"],
# 🔹 Your 3 patient columns immediately after ID
"pacient_jmeno": r["pacient_jmeno"],
"pacient_prijmeni": r["pacient_prijmeni"],
"pacient_rodnecislo": r["pacient_rodnecislo"],
# 🔹 Then all other fields in any order you prefer
"displayTitle": r["displayTitle"],
"createdAt": r["createdAt"],
"updatedAt": r["updatedAt"],
"doneAt": r["doneAt"],
"removedAt": r["removedAt"],
"attachmentsProcessed": r["attachmentsProcessed"],
"messagesProcessed": r["messagesProcessed"],
"communicationprocessed": r["communicationprocessed"],
"questionnaireprocessed": r["questionnaireprocessed"],
"lastSync": r["lastSync"],
})
return output
# ============================
# MAIN
# ============================
def main():
print("📥 Connecting to MySQL...")
conn = pymysql.connect(**DB_CONFIG)
with conn:
with conn.cursor() as cur:
cur.execute("SELECT * FROM medevio_pacienti ORDER BY prijmeni, jmeno")
patients = cur.fetchall()
print(f"📊 Loaded {len(patients)} patients.")
# Load pozadavky
with conn.cursor() as cur:
cur.execute("SELECT * FROM pozadavky ORDER BY createdAt DESC")
pozadavky_rows = cur.fetchall()
print(f"📄 Loaded {len(pozadavky_rows)} pozadavky.")
wb = openpyxl.Workbook()
# ---------------------------------
# 1) FULL SHEET
# ---------------------------------
ws_full = wb.active
ws_full.title = "Patients FULL"
if patients:
headers = list(patients[0].keys())
ws_full.append(headers)
for row in patients:
ws_full.append([row.get(h) for h in headers])
apply_header_style(ws_full)
ws_full.freeze_panes = "A2"
ws_full.auto_filter.ref = ws_full.dimensions
autofit_columns(ws_full)
apply_thin_borders(ws_full)
# ---------------------------------
# 2) COMPACT SHEET
# ---------------------------------
ws_compact = wb.create_sheet("Patients COMPACT")
compact_rows = [create_compact_row(r) for r in patients]
compact_headers = list(compact_rows[0].keys())
ws_compact.append(compact_headers)
for row in compact_rows:
ws_compact.append([row.get(h) for h in compact_headers])
apply_header_style(ws_compact)
ws_compact.freeze_panes = "A2"
ws_compact.auto_filter.ref = ws_compact.dimensions
autofit_columns(ws_compact)
# >>> ADD THIS <<<
ur_col_index = compact_headers.index("user_relationship") + 1
col_letter = get_column_letter(ur_col_index)
ws_compact.column_dimensions[col_letter].width = 7.14
apply_thin_borders(ws_compact)
# ---------------------------------
# 3) POZADAVKY SHEET
# ---------------------------------
ws_p = wb.create_sheet("Pozadavky")
poz_list = create_pozadavky_rows(pozadavky_rows)
headers_p = list(poz_list[0].keys()) if poz_list else []
if headers_p:
ws_p.append(headers_p)
for row in poz_list:
ws_p.append([row.get(h) for h in headers_p])
apply_header_style(ws_p)
ws_p.freeze_panes = "A2"
ws_p.auto_filter.ref = ws_p.dimensions
autofit_columns(ws_p)
apply_thin_borders(ws_p)
# ---------------------------------
# SAVE
# ---------------------------------
wb.save(OUTPUT_PATH)
print(f"✅ Excel report saved to:\n{OUTPUT_PATH}")
if __name__ == "__main__":
main()

View File

@@ -1,227 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
from pathlib import Path
import json
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
# exactly the original query, unchanged
# GRAPHQL_QUERY = r"""
# query ClinicRequestGrid_ListPatientRequestsForClinic2(
# $clinicSlug: String!,
# $queueId: String,
# $queueAssignment: QueueAssignmentFilter!,
# $pageInfo: PageInfo!,
# $locale: Locale!,
# $state: PatientRequestState
# ) {
# requestsResponse: listPatientRequestsForClinic2(
# clinicSlug: $clinicSlug,
# queueId: $queueId,
# queueAssignment: $queueAssignment,
# pageInfo: $pageInfo,
# state: $state
# ) {
# count
# patientRequests {
# id
# displayTitle(locale: $locale)
# createdAt
# updatedAt
# doneAt
# removedAt
# extendedPatient {
# name
# surname
# identificationNumber
# }
# }
# }
# }
# """
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug
queueId: $queueId
queueAssignment: $queueAssignment
state: $state
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
### TIME FIELDS ADDED
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
id
identificationNumber
name
surname
kind
key
type
user {
id
name
surname
}
owner {
name
surname
}
dob
premiumPlanPatient {
id
premiumPlan {
id
}
}
status2
tags(onlyImportant: true) {
id
}
isUnknownPatient
}
invoice {
id
status
amount
currency
dueAmount
isOverdue
refundedAmount
settledAmount
}
lastMessage {
createdAt
id
readAt
sender {
id
name
surname
clinicId
}
text
}
priority
queue {
id
name
clinicPatientRequestQueueUsers {
accountable {
id
name
surname
}
id
}
}
reservations {
calendar {
id
internalName
name
}
id
canceledAt
done
start
}
tags(onlyImportant: true) {
id
}
userECRF(locale: $locale) {
id
sid
icon {
color
id
urlSvg
}
ecrfSet {
id
name
}
}
priceWhenCreated
currencyWhenCreated
createdByDoctor
eventType
clinicNotes {
id
}
clinicMedicalRecord
}
}
}
"""
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": BATCH_SIZE, "offset": 0},
"locale": "cs",
"state": "ACTIVE",
}
payload = {
"operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
print("\n===== ČISTÁ ODPOVĚĎ SERVERU =====\n")
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
print(f"HTTP {r.status_code}\n")
print(r.text) # <-- TISK NEUPRAVENÉHO JSONU
print("\n===== KONEC ČISTÉ ODPOVĚDI =====\n")
if __name__ == "__main__":
main()

View File

@@ -1,136 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import requests
from pathlib import Path
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
TARGET_ID = "cbf6000d-a6ca-4059-88b7-dfdc27220762" # ← sem tvoje ID
# ⭐ Updated GraphQL with lastMessage included
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$pageInfo: PageInfo!,
$locale: Locale!,
$state: PatientRequestState
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
pageInfo: $pageInfo,
state: $state
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
lastMessage {
id
createdAt
updatedAt
}
extendedPatient {
name
surname
identificationNumber
}
}
}
}
"""
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
tok = tok.split(" ", 1)[1]
return tok
def fetch_active(headers, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
"state": "ACTIVE",
}
payload = {
"operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
if r.status_code != 200:
print("HTTP status:", r.status_code)
print(r.text)
r.raise_for_status()
data = r.json().get("data", {}).get("requestsResponse", {})
return data.get("patientRequests", []), data.get("count", 0)
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json"
}
print(f"=== Hledám updatedAt a lastMessage pro pozadavek {TARGET_ID} ===\n")
offset = 0
total_count = None
found = False
while True:
batch, count = fetch_active(headers, offset)
if total_count is None:
total_count = count
if not batch:
break
for r in batch:
if r["id"] == TARGET_ID:
print("Nalezeno!\n")
print(f"id: {r['id']}")
print(f"updatedAt: {r['updatedAt']}")
lm = r.get("lastMessage") or {}
print(f"lastMessage.createdAt: {lm.get('createdAt')}")
print(f"lastMessage.updatedAt: {lm.get('updatedAt')}")
found = True
break
if found:
break
if offset + BATCH_SIZE >= count:
break
offset += BATCH_SIZE
if not found:
print("❌ Požadavek nebyl nalezen mezi ACTIVE.")
print("\n=== HOTOVO ===")
if __name__ == "__main__":
main()

View File

@@ -1,228 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime, timezone
import time
from dateutil import parser
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ⭐ NEW TESTED QUERY includes lastMessage.createdAt
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# 🧿 SAFE DATETIME PARSER (ALWAYS UTC → LOCAL)
# ================================
def to_mysql_dt_utc(iso_str):
"""
Parse Medevio timestamps safely.
Treat timestamps WITHOUT timezone as UTC.
Convert to local time before saving to MySQL.
"""
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str)
# If tz is missing → assume UTC
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
# Convert to local timezone
dt_local = dt.astimezone()
return dt_local.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
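# Example (host timezone assumed to be Europe/Prague, CET = UTC+1 in winter):
#   to_mysql_dt_utc("2025-12-02T06:24:27")  ->  "2025-12-02 07:24:27"
# The naive input is taken as UTC and shifted to the host's local time, so the
# exact output depends on the timezone of the machine running the sync.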
# ================================
# 🔑 TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# 💾 UPSERT (včetně správného updatedAt)
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
# raw API timestamps now go through the new parser
api_updated = to_mysql_dt_utc(r.get("updatedAt"))
last_msg = r.get("lastMessage") or {}
msg_updated = to_mysql_dt_utc(last_msg.get("createdAt"))
# the most recent change wins
def max_dt(a, b):
if a and b:
return max(a, b)
return a or b
final_updated = max_dt(api_updated, msg_updated)
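# Example (values made up): max_dt("2025-12-05 09:49:59", "2025-12-07 12:11:50")
# returns "2025-12-07 12:11:50". Plain string comparison is safe here only because
# both operands come from to_mysql_dt_utc() in the fixed "YYYY-MM-DD HH:MM:SS"
# format, where lexicographic order matches chronological order.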
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt_utc(r.get("createdAt")),
final_updated,
to_mysql_dt_utc(r.get("doneAt")),
to_mysql_dt_utc(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# 📡 FETCH ACTIVE PAGE
# ================================
def fetch_active(headers, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
"state": "ACTIVE",
}
payload = {
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json().get("data", {}).get("requestsResponse", {})
return data.get("patientRequests", []), data.get("count", 0)
# ================================
# 🧠 MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Sync ACTIVE požadavků @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
offset = 0
total_processed = 0
total_count = None
while True:
batch, count = fetch_active(headers, offset)
if total_count is None:
total_count = count
print(f"📡 Celkem ACTIVE v Medevio: {count}")
if not batch:
break
for r in batch:
upsert(conn, r)
total_processed += len(batch)
print(f"{total_processed}/{total_count} ACTIVE processed")
if offset + BATCH_SIZE >= count:
break
offset += BATCH_SIZE
time.sleep(0.4)
conn.close()
print("\n✅ ACTIVE sync hotovo!\n")
# ================================
if __name__ == "__main__":
main()

View File

@@ -1,191 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 300  # download the latest 300 finished (DONE) requests
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ⭐ Verified query including lastMessage
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# DATETIME PARSER (UTC → MySQL)
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str)  # ISO 8601 → timezone-aware datetime (UTC)
dt = dt.astimezone()  # convert to local time (CET/CEST)
return dt.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
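# Editor's note (sketch): unlike to_mysql_dt_utc() in the ACTIVE-sync script, a
# timestamp without a timezone is NOT assumed to be UTC here; astimezone() on a
# naive datetime treats it as local time, so no shift is applied. For example, on a
# CET (UTC+1) host, "2025-12-02T06:24:27Z" becomes "2025-12-02 07:24:27", while
# "2025-12-02T06:24:27" is stored unchanged as "2025-12-02 06:24:27".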
# ================================
# UPSERT WITH MERGED UPDATED TIME
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
# API field
api_updated = to_mysql_dt(r.get("updatedAt"))
# the last message
last_msg = r.get("lastMessage") or {}
msg_at = to_mysql_dt(last_msg.get("createdAt"))
# pick the newer timestamp
def max_dt(a, b):
if a and b:
return max(a, b)
return a or b
final_updated = max_dt(api_updated, msg_at)
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
final_updated,
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# FETCH LAST 300 DONE REQUESTS
# ================================
def fetch_done(headers):
vars = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": LIMIT, "offset": 0},
"locale": "cs",
"state": "DONE",
}
payload = {
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": vars,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", [])
# ================================
# MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Downloading last {LIMIT} DONE requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
requests_list = fetch_done(headers)
print(f"📌 Requests returned: {len(requests_list)}")
for r in requests_list:
upsert(conn, r)
conn.close()
print("\n✅ DONE - latest closed requests synced.\n")
if __name__ == "__main__":
main()
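
Both sync scripts call requests.post once and abort on the first HTTP error, which is brittle for unattended runs under Task Scheduler. Below is a hedged sketch of a retry wrapper that could stand in for the direct call; the helper name, attempt count, and backoff values are illustrative assumptions, not part of the original scripts.

import time

import requests

GRAPHQL_URL = "https://api.medevio.cz/graphql"

def post_with_retry(payload: dict, headers: dict, attempts: int = 3, backoff: float = 2.0):
    """POST to the GraphQL endpoint, retrying transient failures with backoff."""
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            resp = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=30)
            resp.raise_for_status()
            return resp
        except requests.RequestException as exc:
            last_exc = exc
            if attempt < attempts:
                time.sleep(backoff * attempt)  # wait 2 s, then 4 s, ... between attempts
    raise last_exc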

View File

@@ -1,146 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
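# e.g. sanitize_name('CT: hrudník/břicho') -> 'CT_ hrudník_břicho' (illustrative input)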
def clean_folder(folder: Path, valid_files: set):
"""Remove files that do NOT exist in MySQL for this request."""
if not folder.exists():
return
for f in folder.iterdir():
if f.is_file() and sanitize_name(f.name) not in valid_files:
print(f"🗑️ Removing unexpected file: {f.name}")
try:
f.unlink()
except Exception as e:
print(f"⚠️ Cannot delete {f}: {e}")
# ==============================
# 📥 LOAD EVERYTHING IN ONE QUERY
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor(pymysql.cursors.DictCursor)
print("📥 Loading ALL metadata + BLOBs with ONE MySQL query…")
cur.execute("""
SELECT
d.id AS download_id,
d.request_id,
d.filename,
d.file_content,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC, d.created_at ASC
""")
rows = cur.fetchall()
print(f"📦 Loaded {len(rows)} total file rows.\n")
conn.close()
# ==============================
# 🔄 ORGANIZE ROWS PER REQUEST
# ==============================
requests = {} # req_id → list of file dicts
for r in rows:
req_id = r["request_id"]
if req_id not in requests:
requests[req_id] = []
requests[req_id].append(r)
print(f"📌 Unique requests: {len(requests)}\n")
# ==============================
# 🧠 MAIN LOOP (same logic as before)
# ==============================
for req_id, filelist in requests.items():
# ========== GET UPDATEDAT (same logic) ==========
any_row = filelist[0]
updated_at = any_row["req_updated_at"] or datetime.now()
date_str = updated_at.strftime("%Y-%m-%d")
prijmeni = sanitize_name(any_row["prijmeni"] or "Unknown")
jmeno = sanitize_name(any_row["jmeno"] or "")
folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} {req_id}")
main_folder = BASE_DIR / folder_name
# ========== VALID FILES ==========
valid_files = {sanitize_name(r["filename"]) for r in filelist}
# ========== FIND OLD FOLDERS ==========
possible_dups = [
f for f in BASE_DIR.iterdir()
if f.is_dir() and req_id in f.name and f != main_folder
]
# ========== MERGE OLD FOLDERS ==========
for dup in possible_dups:
print(f"♻️ Merging folder: {dup.name}")
clean_folder(dup, valid_files)
main_folder.mkdir(parents=True, exist_ok=True)
for f in dup.iterdir():
if f.is_file():
target = main_folder / f.name
if not target.exists():
f.rename(target)
shutil.rmtree(dup, ignore_errors=True)
# ========== CLEAN MAIN FOLDER ==========
main_folder.mkdir(parents=True, exist_ok=True)
clean_folder(main_folder, valid_files)
# ========== SAVE FILES (fast now) ==========
for r in filelist:
filename = sanitize_name(r["filename"])
dest = main_folder / filename
if dest.exists():
continue
content = r["file_content"]
if not content:
continue
with open(dest, "wb") as f:
f.write(content)
print(f"💾 Saved: {dest.relative_to(BASE_DIR)}")
print("\n🎯 Export complete.\n")

View File

@@ -1,102 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
from pathlib import Path
import os
import re
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
OUTPUT = Path(r"d:\Dropbox\Ordinace\Dokumentace_ke_zpracování\mp1")
OUTPUT.mkdir(exist_ok=True)
def sanitize(name: str) -> str:
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
# ==============================
# 📥 LOAD EVERYTHING IN ONE QUERY
# ==============================
def load_all_files():
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor(pymysql.cursors.DictCursor)
print("📥 Loading ALL medevio_downloads including BLOBs… (can take a few seconds)")
cur.execute("""
SELECT
d.id AS download_id,
d.request_id,
d.attachment_id,
d.filename,
d.content_type,
d.file_size,
d.created_at,
p.pacient_jmeno,
p.pacient_prijmeni,
d.file_content
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY d.created_at
""")
rows = cur.fetchall()
conn.close()
print(f"📦 Loaded {len(rows)} BLOB records.")
return rows
# ==============================
# 💾 SAVE ALL TO FILESYSTEM
# ==============================
def save_all(rows):
saved = 0
for r in rows:
req_id = r["request_id"]
jmeno = sanitize(r["pacient_jmeno"] or "")
prijmeni = sanitize(r["pacient_prijmeni"] or "")
filename = sanitize(r["filename"] or f"{r['download_id']}.bin")
# Folder for each request
folder = OUTPUT / f"{prijmeni}, {jmeno} {req_id}"
folder.mkdir(exist_ok=True)
dest = folder / filename
# Skip existing
if dest.exists():
continue
data = r["file_content"]
if not data:
continue
with open(dest, "wb") as f:
f.write(data)
print(f"💾 Saved {dest}")
saved += 1
print(f"\n🎯 Done — {saved} files saved.")
# ==============================
# MAIN
# ==============================
if __name__ == "__main__":
rows = load_all_files() # ONE query
save_all(rows) # then write to disk
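
Loading every file_content BLOB with a single fetchall() keeps the script simple, but it holds all attachments in memory at once. If the table grows, a server-side streaming cursor is one way to keep memory flat; the generator below is a sketch under that assumption, not part of the original script.

import pymysql
from pymysql.cursors import SSDictCursor

def iter_downloads(db_config: dict):
    """Yield one attachment row at a time instead of loading all BLOBs at once."""
    conn = pymysql.connect(**{**db_config, "cursorclass": SSDictCursor})
    try:
        with conn.cursor() as cur:
            cur.execute("""
                SELECT d.request_id, d.filename, d.file_content,
                       p.pacient_jmeno, p.pacient_prijmeni
                FROM medevio_downloads d
                JOIN pozadavky p ON d.request_id = p.id
                ORDER BY d.created_at
            """)
            for row in cur:
                yield row
    finally:
        conn.close()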

View File

@@ -1,173 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
import time
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
BASE_DIR = Path(r"d:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
"""Replace invalid filename characters with underscore."""
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
"""Remove all files in folder that are NOT present in valid_files."""
if not folder.exists():
return
for f in folder.iterdir():
if f.is_file():
if sanitize_name(f.name) not in valid_files:
print(f"🗑️ Removing unexpected file: {f.name}")
try:
f.unlink()
except Exception as e:
print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()
print("🔍 Loading metadata from DB (FAST)…")
cur_meta.execute("""
SELECT d.id AS download_id,
d.request_id,
d.filename,
d.created_at,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC
""")
rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} attachment records.\n")
# ==============================
# 🧠 MAIN LOOP
# ==============================
processed_requests = set()
for r in rows:
req_id = r["request_id"]
if req_id in processed_requests:
continue
processed_requests.add(req_id)
# ========== FETCH ALL VALID FILES FOR THIS REQUEST ==========
cur_meta.execute(
"SELECT filename FROM medevio_downloads WHERE request_id=%s",
(req_id,)
)
valid_files = {sanitize_name(row["filename"]) for row in cur_meta.fetchall()}
# ========== FOLDER NAME BASED ON UPDATEDAT ==========
updated_at = r["req_updated_at"] or datetime.now()
date_str = updated_at.strftime("%Y-%m-%d")
prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
jmeno = sanitize_name(r["jmeno"] or "")
folder_name = f"{date_str} {prijmeni}, {jmeno} {req_id}"
folder_name = sanitize_name(folder_name)
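    # e.g. "2025-11-20 Novak, Jan 1a2b3c4d" (illustrative values)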
main_folder = BASE_DIR / folder_name
# ========== FIND OLD FOLDER (DUPLICATE) ==========
    # Any folder whose name contains req_id and is not main_folder is a duplicate
possible_dups = [
f for f in BASE_DIR.iterdir()
if f.is_dir() and req_id in f.name and f != main_folder
]
# ========== MERGE DUPLICATES ==========
for dup in possible_dups:
print(f"♻️ Merging duplicate folder: {dup.name}")
# 1) Clean unexpected files in dup
clean_folder(dup, valid_files)
# 2) Move files from dup to main folder
main_folder.mkdir(parents=True, exist_ok=True)
for f in dup.iterdir():
if f.is_file():
target = main_folder / f.name
if not target.exists():
f.rename(target)
# 3) Remove the duplicate folder
try:
shutil.rmtree(dup, ignore_errors=True)
except Exception as e:
print(f"⚠️ Could not delete duplicate folder {dup}: {e}")
# ========== CLEAN MAIN FOLDER ==========
clean_folder(main_folder, valid_files)
# ========== DOWNLOAD MISSING FILES ==========
main_folder.mkdir(parents=True, exist_ok=True)
for filename in valid_files:
dest = main_folder / filename
if dest.exists():
continue
# fetch blob only now
start = time.perf_counter()
cur_blob.execute(
"SELECT file_content FROM medevio_downloads "
"WHERE request_id=%s AND filename=%s",
(req_id, filename)
)
row = cur_blob.fetchone()
if not row:
continue
end = time.perf_counter()
print(f"⏱ Took {end - start:.4f} seconds")
content = row[0]
if not content:
continue
with open(dest, "wb") as f:
f.write(content)
print(f"💾 Wrote: {dest.relative_to(BASE_DIR)}")
print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()
conn.close()