#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Sync closed (DONE) Medevio patient requests into the local MySQL table.

Fetches patient requests in state DONE from the Medevio GraphQL API in
batches of LIMIT and upserts them into the `pozadavky` table. With
FULL_DOWNLOAD=False only the newest batch is processed; with True, all
batches are paged through.
"""

import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser

# ================================
# šŸ”§ CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
LIMIT = 500            # batch size / number of records per API page
FULL_DOWNLOAD = False  # šŸ”„ TOGGLE: False = last LIMIT only, True = ALL batches
HTTP_TIMEOUT = 30      # seconds; a POST without a timeout can hang forever

# NOTE(review): database credentials are hard-coded in source — move them to
# environment variables or a config file kept out of version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}

# ⭐ Query with lastMessage
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
  $clinicSlug: String!,
  $queueId: String,
  $queueAssignment: QueueAssignmentFilter!,
  $state: PatientRequestState,
  $pageInfo: PageInfo!,
  $locale: Locale!
) {
  requestsResponse: listPatientRequestsForClinic2(
    clinicSlug: $clinicSlug,
    queueId: $queueId,
    queueAssignment: $queueAssignment,
    state: $state,
    pageInfo: $pageInfo
  ) {
    count
    patientRequests {
      id
      displayTitle(locale: $locale)
      createdAt
      updatedAt
      doneAt
      removedAt
      extendedPatient {
        name
        surname
        identificationNumber
      }
      lastMessage {
        createdAt
      }
    }
  }
}
"""


# ================================
# TOKEN
# ================================
def read_token(path: Path) -> str:
    """Read the API token from *path*, stripping an optional 'Bearer ' prefix."""
    tok = path.read_text(encoding="utf-8").strip()
    return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok


# ================================
# DATETIME PARSER (UTC → MySQL)
# ================================
def to_mysql_dt(iso_str):
    """Convert an ISO-8601 string to a MySQL DATETIME string in local time.

    Returns None for falsy input or any value dateutil cannot parse.
    """
    if not iso_str:
        return None
    try:
        dt = parser.isoparse(iso_str)  # ISO8601 → aware datetime (UTC)
        dt = dt.astimezone()           # convert to local timezone
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except (ValueError, TypeError, OverflowError):
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; unparseable values become NULL.
        return None


# ================================
# UPSERT REQUEST
# ================================
def upsert(conn, r):
    """Insert or update one patient request row in `pozadavky`.

    `updatedAt` is stored as the later of the request's own updatedAt and
    the lastMessage timestamp, so a new chat message bumps the row.
    Commits after each row (caller loops over requests).
    """
    p = r.get("extendedPatient") or {}
    api_updated = to_mysql_dt(r.get("updatedAt"))
    last_msg = r.get("lastMessage") or {}
    msg_at = to_mysql_dt(last_msg.get("createdAt"))

    def max_dt(a, b):
        # max() of the two MySQL-format strings (lexicographic == chronological
        # for "%Y-%m-%d %H:%M:%S"); tolerates either side being None.
        if a and b:
            return max(a, b)
        return a or b

    final_updated = max_dt(api_updated, msg_at)

    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """
    vals = (
        r["id"],
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        final_updated,
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )
    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()


# ================================
# FETCH DONE REQUESTS (one batch)
# ================================
def fetch_done(headers, offset):
    """Fetch one page of DONE requests starting at *offset*.

    Returns (patientRequests list, total count reported by the API).
    Raises requests.HTTPError on a non-2xx response.
    """
    # Renamed from `vars`, which shadowed the builtin.
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": LIMIT, "offset": offset},
        "locale": "cs",
        "state": "DONE",
    }
    payload = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    # timeout added: without it a stalled server blocks the sync indefinitely.
    r = requests.post(
        "https://api.medevio.cz/graphql",
        json=payload,
        headers=headers,
        timeout=HTTP_TIMEOUT,
    )
    r.raise_for_status()
    data = r.json()["data"]["requestsResponse"]
    return data.get("patientRequests", []), data.get("count", 0)


# ================================
# MAIN
# ================================
def main():
    """Run the sync: page through DONE requests and upsert each into MySQL."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }
    conn = pymysql.connect(**DB_CONFIG)
    print(f"\n=== Sync CLOSED requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    offset = 0
    total_count = None
    total_processed = 0

    try:
        while True:
            batch, count = fetch_done(headers, offset)
            if total_count is None:
                total_count = count
                print(f"šŸ“” Total DONE in Medevio: {count}")

            if not batch:
                break

            print(f"  • Processing batch offset={offset} size={len(batch)}")
            for r in batch:
                upsert(conn, r)
            total_processed += len(batch)

            if not FULL_DOWNLOAD:
                # process only last LIMIT records
                break

            # FULL DOWNLOAD → fetch next batch
            offset += LIMIT
            if offset >= count:
                break
    finally:
        # Previously skipped when a batch raised mid-loop, leaking the
        # connection.
        conn.close()

    print(f"\nāœ… DONE — {total_processed} requests synced.\n")


if __name__ == "__main__":
    main()