medevio/10ReadPozadavky/ReadPozadavkySaveMySql.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import socket
import time
from datetime import datetime
from pathlib import Path

import pymysql
import requests

# Wait up to ~5 minutes for MySQL to become reachable on port 3307
# (the script may start before the database does, e.g. right after boot).
for _ in range(30):
    try:
        socket.create_connection(("127.0.0.1", 3307), timeout=3).close()
        break
    except OSError:
        time.sleep(10)

# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
DONE_LIMIT = 200  # only the last 200 DONE requests are synced

DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}
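
# The upsert below assumes a `pozadavky` table already exists in the `medevio`
# database. A minimal sketch of that table, inferred from the INSERT statement
# in upsert(); column types and lengths are assumptions, not taken from the
# original project:
#
#   CREATE TABLE IF NOT EXISTS pozadavky (
#       id                  VARCHAR(64)  PRIMARY KEY,
#       displayTitle        TEXT,
#       createdAt           DATETIME,
#       updatedAt           DATETIME,
#       doneAt              DATETIME,
#       removedAt           DATETIME,
#       pacient_jmeno       VARCHAR(255),
#       pacient_prijmeni    VARCHAR(255),
#       pacient_rodnecislo  VARCHAR(32)
#   ) CHARACTER SET utf8mb4;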

GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
  $clinicSlug: String!,
  $queueId: String,
  $queueAssignment: QueueAssignmentFilter!,
  $pageInfo: PageInfo!,
  $locale: Locale!,
  $state: PatientRequestState
) {
  requestsResponse: listPatientRequestsForClinic2(
    clinicSlug: $clinicSlug,
    queueId: $queueId,
    queueAssignment: $queueAssignment,
    pageInfo: $pageInfo,
    state: $state
  ) {
    count
    patientRequests {
      id
      displayTitle(locale: $locale)
      createdAt
      updatedAt
      doneAt
      removedAt
      extendedPatient {
        name
        surname
        identificationNumber
      }
    }
  }
}
"""

# ================================
# 🔑 TOKEN
# ================================
def read_token(p: Path) -> str:
    """Read the API token from disk, stripping an optional 'Bearer ' prefix."""
    tok = p.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        tok = tok.split(" ", 1)[1]
    return tok

# ================================
# 🕒 DATETIME CONVERSION
# ================================
def to_mysql_dt(iso_str):
    """Convert an ISO-8601 timestamp (trailing 'Z' allowed) to a MySQL DATETIME string."""
    if not iso_str:
        return None
    try:
        dt = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return None

# ================================
# 💾 UPSERT TO MYSQL
# ================================
def upsert(conn, r):
    """Insert a request into `pozadavky`, or update it if the id already exists."""
    p = r.get("extendedPatient") or {}
    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """
    vals = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        to_mysql_dt(r.get("updatedAt")),
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )
    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()

# ================================
# 📡 FETCH FUNCTION
# ================================
def fetch_requests(headers, state, limit=None, offset=0):
    """Fetch one page of requests for a given state; `limit` caps the page size."""
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": limit or BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": state,
    }
    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
    r.raise_for_status()
    data = r.json().get("data", {}).get("requestsResponse", {})
    return data.get("patientRequests", []), data.get("count", 0)
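
# For reference, fetch_requests unwraps a GraphQL JSON response shaped roughly
# like the sketch below (field names follow GRAPHQL_QUERY above; the concrete
# values are illustrative, not real data):
#
#   {"data": {"requestsResponse": {"count": 342, "patientRequests": [
#       {"id": "...", "displayTitle": "...", "createdAt": "2025-01-01T10:00:00Z",
#        "updatedAt": null, "doneAt": null, "removedAt": null,
#        "extendedPatient": {"name": "...", "surname": "...",
#                            "identificationNumber": "..."}}]}}}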

# ================================
# 🧠 MAIN
# ================================
def main():
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }
    conn = pymysql.connect(**DB_CONFIG)
    print(f"\n=== Medevio požadavky sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    # --- ACTIVE (all, paginated)
    print("\n📡 Fetching all ACTIVE requests...")
    offset = 0
    total_active = 0
    while True:
        requests_batch, count = fetch_requests(headers, "ACTIVE", BATCH_SIZE, offset)
        if not requests_batch:
            break
        for r in requests_batch:
            upsert(conn, r)
        total_active += len(requests_batch)
        print(f"{total_active} ACTIVE processed")
        if len(requests_batch) < BATCH_SIZE:
            break
        offset += BATCH_SIZE
        time.sleep(0.4)

    # --- DONE (only the DONE_LIMIT most recent)
    print(f"\n📡 Fetching last {DONE_LIMIT} DONE requests...")
    done_requests, done_count = fetch_requests(headers, "DONE", DONE_LIMIT)
    for r in done_requests:
        upsert(conn, r)
    print(f" ✅ DONE processed: {len(done_requests)} (of total {done_count})")

    conn.close()
    print("\n✅ Sync completed successfully.\n")


if __name__ == "__main__":
    main()