This commit is contained in: Z230

190  10ReadPozadavky/PRAVIDELNE_2_ReadPoznamky.py  Normal file
@@ -0,0 +1,190 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Download and store Medevio questionnaires (userNote + eCRF) for all patient requests.
Uses the verified working query "GetPatientRequest2".
"""

import json
import requests
import pymysql
from datetime import datetime
from pathlib import Path
import time

# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql"

DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}


from datetime import datetime

def fix_datetime(dt_str):
    """Convert ISO 8601 string with 'Z' or ms into MySQL DATETIME format."""
    if not dt_str:
        return None
    try:
        # Remove trailing Z and parse flexible ISO format
        return datetime.fromisoformat(dt_str.replace("Z", "").replace("+00:00", ""))
    except Exception:
        return None

# ✅ Optional: limit which requests to process
CREATED_AFTER = "2025-01-01"  # set "" to disable

# ==============================
# 🧮 HELPERS
# ==============================
def read_token(p: Path) -> str:
    """Read Bearer token from file."""
    tok = p.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        tok = tok.split(" ", 1)[1]
    return tok


GRAPHQL_QUERY = r"""
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
  request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
    id
    displayTitle(locale: $locale)
    createdAt
    updatedAt
    userNote
    eventType
    extendedPatient(clinicSlug: $clinicSlug) {
      name
      surname
      identificationNumber
    }
    ecrfFilledData(locale: $locale) {
      name
      groups {
        label
        fields {
          name
          label
          type
          value
        }
      }
    }
  }
}
"""


def fetch_questionnaire(headers, request_id, clinic_slug):
    """Fetch questionnaire for given request ID."""
    payload = {
        "operationName": "GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {
            "requestId": request_id,
            "clinicSlug": clinic_slug,
            "locale": "cs",
        },
    }
    r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=40)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code} for {request_id}: {r.text}")
        return None
    return r.json().get("data", {}).get("request")


def insert_questionnaire(cur, req):
    """Insert questionnaire data into MySQL."""
    if not req:
        return

    patient = req.get("extendedPatient") or {}
    ecrf_data = req.get("ecrfFilledData")

    created_at = fix_datetime(req.get("createdAt"))
    updated_at = fix_datetime(req.get("updatedAt"))

    cur.execute("""
        INSERT INTO medevio_questionnaires (
            request_id, created_at, updated_at, user_note, ecrf_json
        )
        VALUES (%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            updated_at = VALUES(updated_at),
            user_note = VALUES(user_note),
            ecrf_json = VALUES(ecrf_json),
            updated_local = NOW()
    """, (
        req.get("id"),
        created_at,
        updated_at,
        req.get("userNote"),
        json.dumps(ecrf_data, ensure_ascii=False),
    ))
    print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")


# ==============================
# 🧠 MAIN
# ==============================
def main():
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    with conn.cursor() as cur:
        sql = """
            SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
            FROM pozadavky
            WHERE (questionnaireprocessed IS NULL OR questionnaireprocessed < updatedAt)
        """
        if CREATED_AFTER:
            sql += " AND createdAt >= %s"
            cur.execute(sql, (CREATED_AFTER,))
        else:
            cur.execute(sql)

        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} requests needing questionnaire check.")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")

        req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
        if not req:
            print(" ⚠️ No questionnaire data found.")
            continue

        with conn.cursor() as cur:
            insert_questionnaire(cur, req)
            cur.execute("UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s", (req_id,))
        conn.commit()

        time.sleep(0.6)  # polite pacing

    conn.close()
    print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")


# ==============================
if __name__ == "__main__":
    main()
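Note: the ON DUPLICATE KEY UPDATE above only deduplicates per request if `medevio_questionnaires` has a primary or unique key on request_id; the table itself is not created anywhere in this commit. A minimal sketch of a compatible schema, assuming request_id is the primary key and ecrf_json holds the serialized eCRF (the column types here are assumptions, not taken from the repository):

import pymysql

QUESTIONNAIRES_DDL = """
CREATE TABLE IF NOT EXISTS medevio_questionnaires (
    request_id    CHAR(36) NOT NULL PRIMARY KEY,  -- UUID of the patient request
    created_at    DATETIME NULL,
    updated_at    DATETIME NULL,
    user_note     TEXT     NULL,
    ecrf_json     LONGTEXT NULL,                  -- JSON dump of ecrfFilledData
    updated_local DATETIME NULL                   -- set by the upsert on refresh
) CHARACTER SET utf8mb4
"""

def ensure_questionnaires_table(db_config: dict) -> None:
    """Create the questionnaire table once, before the first sync run."""
    conn = pymysql.connect(**db_config)
    try:
        with conn.cursor() as cur:
            cur.execute(QUESTIONNAIRES_DDL)
        conn.commit()
    finally:
        conn.close()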
@@ -3,8 +3,8 @@
 
 """
 Download all attachments for pozadavky where attachmentsProcessed IS NULL
-and (optionally) createdAt is newer than a configurable cutoff date.
-Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed = NOW().
+and (optionally) createdAt is newer than a cutoff date.
+Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed.
 """
 
 import zlib
@@ -31,9 +31,7 @@ DB_CONFIG = {
     "cursorclass": pymysql.cursors.DictCursor,
 }
 
-# ✅ Optional: Only process requests created after this date
-# Leave empty ("") to process all
-CREATED_AFTER = "2024-12-01"  # 🕓 Adjust freely, or set to "" for no limit
+CREATED_AFTER = "2024-12-01"  # optional filter
 
 GRAPHQL_QUERY = r"""
 query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
@@ -59,74 +57,69 @@ query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
 # ==============================
 # 🧮 HELPERS
 # ==============================
-def short_crc8(uuid_str: str) -> str:
-    """Return deterministic 8-char hex string from any input string (CRC32)."""
-    return f"{zlib.crc32(uuid_str.encode('utf-8')) & 0xffffffff:08x}"
-
 def extract_filename_from_url(url: str) -> str:
-    """Extracts filename from S3-style URL (between last '/' and first '?')."""
     try:
         return url.split("/")[-1].split("?")[0]
-    except Exception:
+    except:
         return "unknown_filename"
 
 def read_token(p: Path) -> str:
-    """Read Bearer token from file."""
     tok = p.read_text(encoding="utf-8").strip()
-    if tok.startswith("Bearer "):
-        tok = tok.split(" ", 1)[1]
-    return tok
+    return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
 
 # ==============================
 # 📡 FETCH ATTACHMENTS
 # ==============================
 def fetch_attachments(headers, request_id):
-    variables = {"requestId": request_id}
     payload = {
         "operationName": "ClinicRequestDetail_GetPatientRequest2",
         "query": GRAPHQL_QUERY,
-        "variables": variables,
+        "variables": {"requestId": request_id},
     }
     r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
     if r.status_code != 200:
         print(f"❌ HTTP {r.status_code} for request {request_id}")
         return []
-    data = r.json().get("data", {}).get("patientRequestMedicalRecords", [])
-    return data
+    return r.json().get("data", {}).get("patientRequestMedicalRecords", [])
 
 # ==============================
-# 💾 SAVE TO MYSQL (with skip)
+# 💾 SAVE TO MYSQL (clean version)
 # ==============================
-def insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids):
+def insert_download(cur, req_id, a, m, created_date, existing_ids):
 
     attachment_id = a.get("id")
     if attachment_id in existing_ids:
-        print(f" ⏭️ Skipping already downloaded attachment {attachment_id}")
+        print(f" ⏭️ Already downloaded {attachment_id}")
         return False
 
     url = m.get("downloadUrl")
     if not url:
-        print(" ⚠️ No download URL")
+        print(" ⚠️ Missing download URL")
         return False
 
     filename = extract_filename_from_url(url)
 
+    # Download file
     try:
         r = requests.get(url, timeout=30)
         r.raise_for_status()
         content = r.content
     except Exception as e:
-        print(f" ⚠️ Failed to download {url}: {e}")
+        print(f" ⚠️ Download failed {url}: {e}")
        return False
 
     file_size = len(content)
     attachment_type = a.get("attachmentType")
     content_type = m.get("contentType")
 
+    # 🚨 CLEAN INSERT — no patient_jmeno/no patient_prijmeni
     cur.execute("""
         INSERT INTO medevio_downloads (
-            request_id, attachment_id, attachment_type, filename,
-            content_type, file_size, pacient_jmeno, pacient_prijmeni,
+            request_id, attachment_id, attachment_type,
+            filename, content_type, file_size,
             created_at, file_content
-        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
+        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
         ON DUPLICATE KEY UPDATE
             file_content = VALUES(file_content),
             file_size = VALUES(file_size),
@@ -138,15 +131,15 @@ def insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_i
         filename,
         content_type,
         file_size,
-        jmeno,
-        prijmeni,
         created_date,
-        content
+        content,
     ))
 
     existing_ids.add(attachment_id)
     print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
     return True
 
 
 # ==============================
 # 🧠 MAIN
 # ==============================
@@ -155,20 +148,19 @@ def main():
     headers = {
         "Authorization": f"Bearer {token}",
         "Content-Type": "application/json",
-        "Accept": "application/json",
     }
 
     conn = pymysql.connect(**DB_CONFIG)
 
-    print("📦 Loading list of already downloaded attachments...")
+    # Load existing IDs
     with conn.cursor() as cur:
         cur.execute("SELECT attachment_id FROM medevio_downloads")
         existing_ids = {row["attachment_id"] for row in cur.fetchall()}
-    print(f"✅ Found {len(existing_ids)} attachments already saved.")
+    print(f"✅ {len(existing_ids)} attachments already saved.")
 
-    # ✅ Dynamic SQL with optional createdAt filter
+    # Build query for pozadavky
     sql = """
-        SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
+        SELECT id, pacient_prijmeni, pacient_jmeno, createdAt
         FROM pozadavky
         WHERE attachmentsProcessed IS NULL
     """
@@ -179,25 +171,21 @@ def main():
 
     with conn.cursor() as cur:
         cur.execute(sql, params)
-        rows = cur.fetchall()
+        req_rows = cur.fetchall()
 
-    print(f"📋 Found {len(rows)} pozadavky to process (attachmentsProcessed IS NULL"
-          + (f", created >= {CREATED_AFTER}" if CREATED_AFTER else "") + ")")
+    print(f"📋 Found {len(req_rows)} pozadavky to process.")
 
-    for i, row in enumerate(rows, 1):
+    # Process each pozadavek
+    for i, row in enumerate(req_rows, 1):
         req_id = row["id"]
         prijmeni = row.get("pacient_prijmeni") or "Neznamy"
         jmeno = row.get("pacient_jmeno") or ""
-        created = row.get("createdAt")
+        created_date = row.get("createdAt") or datetime.now()
 
-        try:
-            created_date = datetime.strptime(str(created), "%Y-%m-%d %H:%M:%S")
-        except Exception:
-            created_date = None
-
-        print(f"\n[{i}/{len(rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
+        print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
 
         attachments = fetch_attachments(headers, req_id)
 
         if not attachments:
             print(" ⚠️ No attachments found")
             with conn.cursor() as cur:
@@ -208,19 +196,20 @@ def main():
         with conn.cursor() as cur:
             for a in attachments:
                 m = a.get("medicalRecord") or {}
-                insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids)
+                insert_download(cur, req_id, a, m, created_date, existing_ids)
         conn.commit()
 
-        # ✅ mark processed
+        # Mark processed
         with conn.cursor() as cur:
             cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
         conn.commit()
 
-        print(f" ✅ {len(attachments)} attachments processed for {prijmeni}, {jmeno}")
-        time.sleep(0.3)  # polite API delay
+        print(f" ✅ Done ({len(attachments)} attachments)")
+
+        time.sleep(0.3)
 
     conn.close()
-    print("\n✅ Done! All new attachments processed and pozadavky updated.")
+    print("\n🎯 All attachments processed.")
 
 # ==============================
 if __name__ == "__main__":
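The diff above drops pacient_jmeno/pacient_prijmeni from the INSERT; the export scripts below resolve patient names through a JOIN to pozadavky instead. For the ON DUPLICATE KEY UPDATE to deduplicate per attachment, `medevio_downloads` needs a unique key on attachment_id. A minimal sketch of a compatible table, assuming an auto-increment surrogate id and BLOB storage for the file body (the types and the index name are assumptions, not part of this commit):

DOWNLOADS_DDL = """
CREATE TABLE IF NOT EXISTS medevio_downloads (
    id              BIGINT AUTO_INCREMENT PRIMARY KEY,
    request_id      CHAR(36)     NOT NULL,
    attachment_id   CHAR(36)     NOT NULL,
    attachment_type VARCHAR(64)  NULL,
    filename        VARCHAR(255) NULL,
    content_type    VARCHAR(128) NULL,
    file_size       BIGINT       NULL,
    created_at      DATETIME     NULL,
    downloaded_at   DATETIME     NULL,
    file_content    LONGBLOB     NULL,
    UNIQUE KEY uq_attachment (attachment_id)   -- lets ON DUPLICATE KEY UPDATE refresh re-downloads
) CHARACTER SET utf8mb4
"""

# One-time setup, e.g. with the same DB_CONFIG the scripts already define:
#   conn = pymysql.connect(**DB_CONFIG)
#   with conn.cursor() as cur:
#       cur.execute(DOWNLOADS_DDL)
#   conn.commit()
#   conn.close()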
113  10ReadPozadavky/PRAVIDELNE_5_SaveToFileSystem.py  Normal file
@@ -0,0 +1,113 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import zlib
import pymysql
import re
from pathlib import Path
from datetime import datetime

# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}

BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)


def sanitize_name(name: str) -> str:
    """Replace invalid filename characters with underscore."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()


# ==============================
# 📦 EXPORT WITH JOIN TO POZADAVKY
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()

# 🎯 JOIN medevio_downloads → pozadavky
cur_meta.execute("""
    SELECT d.id, d.request_id, d.attachment_id, d.filename,
           d.created_at, d.downloaded_at,
           p.pacient_jmeno AS jmeno,
           p.pacient_prijmeni AS prijmeni
    FROM medevio_downloads d
    JOIN pozadavky p ON d.request_id = p.id
    WHERE d.file_content IS NOT NULL;
""")

rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} records to check/export")

skipped, exported = 0, 0

for r in rows:
    try:
        created = r["created_at"] or r["downloaded_at"] or datetime.now()
        date_str = created.strftime("%Y-%m-%d")

        # 👍 Now always correct from pozadavky
        prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
        jmeno = sanitize_name(r["jmeno"] or "")

        # 🔥 Full request_id for folder identification
        full_req_id = sanitize_name(r["request_id"])

        # Folder names (normal and triangle)
        base_folder = f"{date_str} {prijmeni}, {jmeno} {full_req_id}"
        tri_folder = f"{date_str}▲ {prijmeni}, {jmeno} {full_req_id}"

        base_folder = sanitize_name(base_folder)
        tri_folder = sanitize_name(tri_folder)

        base_path = BASE_DIR / base_folder
        tri_path = BASE_DIR / tri_folder

        filename = sanitize_name(r["filename"] or f"unknown_{r['id']}.bin")
        file_path_base = base_path / filename
        file_path_tri = tri_path / filename

        # 🟡 Skip if file already exists
        if file_path_base.exists() or file_path_tri.exists():
            skipped += 1
            found_in = "▲" if file_path_tri.exists() else ""
            print(f"⏭️ Skipping existing{found_in}: {filename}")
            continue

        # Ensure directory exists
        base_path.mkdir(parents=True, exist_ok=True)

        # 2️⃣ Fetch blob content
        cur_blob.execute(
            "SELECT file_content FROM medevio_downloads WHERE id = %s",
            (r["id"],)
        )
        blob = cur_blob.fetchone()[0]

        if blob:
            with open(file_path_base, "wb") as f:
                f.write(blob)
            exported += 1
            print(f"✅ Saved: {file_path_base.relative_to(BASE_DIR)}")
        else:
            print(f"⚠️ No content for id={r['id']}")

    except Exception as e:
        print(f"❌ Error for id={r['id']}: {e}")

cur_blob.close()
cur_meta.close()
conn.close()

print(f"\n🎯 Export complete — {exported} new files saved, {skipped} skipped.\n")
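For orientation, the folder naming this export checks against: a plain folder for new exports and a "▲" variant that marks a request's folder as already processed; a file found in either variant is skipped. A small illustration with made-up values (the record below is hypothetical, not from the database):

from datetime import datetime

record = {  # hypothetical example row after the JOIN to pozadavky
    "created_at": datetime(2025, 1, 15, 9, 30),
    "prijmeni": "Nováková",
    "jmeno": "Anna",
    "request_id": "3f2b9c1e-aaaa-bbbb-cccc-111122223333",
}

date_str = record["created_at"].strftime("%Y-%m-%d")
base_folder = f"{date_str} {record['prijmeni']}, {record['jmeno']} {record['request_id']}"
tri_folder = f"{date_str}▲ {record['prijmeni']}, {record['jmeno']} {record['request_id']}"

print(base_folder)  # 2025-01-15 Nováková, Anna 3f2b9c1e-aaaa-bbbb-cccc-111122223333
print(tri_folder)   # 2025-01-15▲ Nováková, Anna 3f2b9c1e-aaaa-bbbb-cccc-111122223333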
0  10ReadPozadavky/PRAVIDELNE_PLNYSCRIPT.py  Normal file
@@ -1,192 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-import pymysql
-import requests
-from pathlib import Path
-from datetime import datetime
-import time
-
-import time, socket
-# for _ in range(30):
-#     try:
-#         socket.create_connection(("127.0.0.1", 3307), timeout=3).close()
-#         break
-#     except OSError:
-#         time.sleep(10)
-# ================================
-# 🔧 CONFIGURATION
-# ================================
-TOKEN_PATH = Path("token.txt")
-CLINIC_SLUG = "mudr-buzalkova"
-BATCH_SIZE = 100
-DONE_LIMIT = 200  # only last 200 DONE
-
-DB_CONFIG = {
-    # "host": "127.0.0.1",
-    "host": "192.168.1.76",
-    "port": 3307,
-    "user": "root",
-    "password": "Vlado9674+",
-    "database": "medevio",
-    "charset": "utf8mb4",
-    "cursorclass": pymysql.cursors.DictCursor,
-}
-
-GRAPHQL_QUERY = r"""
-query ClinicRequestGrid_ListPatientRequestsForClinic2(
-    $clinicSlug: String!,
-    $queueId: String,
-    $queueAssignment: QueueAssignmentFilter!,
-    $pageInfo: PageInfo!,
-    $locale: Locale!,
-    $state: PatientRequestState
-) {
-    requestsResponse: listPatientRequestsForClinic2(
-        clinicSlug: $clinicSlug,
-        queueId: $queueId,
-        queueAssignment: $queueAssignment,
-        pageInfo: $pageInfo,
-        state: $state
-    ) {
-        count
-        patientRequests {
-            id
-            displayTitle(locale: $locale)
-            createdAt
-            updatedAt
-            doneAt
-            removedAt
-            extendedPatient {
-                name
-                surname
-                identificationNumber
-            }
-        }
-    }
-}
-"""
-
-# ================================
-# 🔑 TOKEN
-# ================================
-def read_token(p: Path) -> str:
-    tok = p.read_text(encoding="utf-8").strip()
-    if tok.startswith("Bearer "):
-        tok = tok.split(" ", 1)[1]
-    return tok
-
-# ================================
-# 🕒 DATETIME CONVERSION
-# ================================
-def to_mysql_dt(iso_str):
-    if not iso_str:
-        return None
-    try:
-        dt = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
-        return dt.strftime("%Y-%m-%d %H:%M:%S")
-    except Exception:
-        return None
-
-# ================================
-# 💾 UPSERT TO MYSQL
-# ================================
-def upsert(conn, r):
-    p = (r.get("extendedPatient") or {})
-    sql = """
-        INSERT INTO pozadavky (
-            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
-            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
-        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
-        ON DUPLICATE KEY UPDATE
-            displayTitle=VALUES(displayTitle),
-            updatedAt=VALUES(updatedAt),
-            doneAt=VALUES(doneAt),
-            removedAt=VALUES(removedAt),
-            pacient_jmeno=VALUES(pacient_jmeno),
-            pacient_prijmeni=VALUES(pacient_prijmeni),
-            pacient_rodnecislo=VALUES(pacient_rodnecislo)
-    """
-    vals = (
-        r.get("id"),
-        r.get("displayTitle"),
-        to_mysql_dt(r.get("createdAt")),
-        to_mysql_dt(r.get("updatedAt")),
-        to_mysql_dt(r.get("doneAt")),
-        to_mysql_dt(r.get("removedAt")),
-        p.get("name"),
-        p.get("surname"),
-        p.get("identificationNumber"),
-    )
-    with conn.cursor() as cur:
-        cur.execute(sql, vals)
-    conn.commit()
-
-# ================================
-# 📡 FETCH FUNCTION
-# ================================
-def fetch_requests(headers, state, limit=None):
-    """Fetch requests for a given state; optional limit for DONE."""
-    variables = {
-        "clinicSlug": CLINIC_SLUG,
-        "queueId": None,
-        "queueAssignment": "ANY",
-        "pageInfo": {"first": limit or BATCH_SIZE, "offset": 0},
-        "locale": "cs",
-        "state": state,
-    }
-    payload = {
-        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
-        "query": GRAPHQL_QUERY,
-        "variables": variables,
-    }
-    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
-    r.raise_for_status()
-    data = r.json().get("data", {}).get("requestsResponse", {})
-    return data.get("patientRequests", []), data.get("count", 0)
-
-# ================================
-# 🧠 MAIN
-# ================================
-def main():
-    token = read_token(TOKEN_PATH)
-    headers = {
-        "Authorization": f"Bearer {token}",
-        "Content-Type": "application/json",
-        "Accept": "application/json",
-    }
-
-    conn = pymysql.connect(**DB_CONFIG)
-
-    print(f"\n=== Medevio požadavky sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
-
-    # --- ACTIVE (all, paginated)
-    print("\n📡 Fetching all ACTIVE requests...")
-    offset = 0
-    total_active = 0
-    while True:
-        requests_batch, count = fetch_requests(headers, "ACTIVE", BATCH_SIZE)
-        if not requests_batch:
-            break
-        for r in requests_batch:
-            upsert(conn, r)
-        total_active += len(requests_batch)
-        print(f" • {total_active} ACTIVE processed")
-        if len(requests_batch) < BATCH_SIZE:
-            break
-        offset += BATCH_SIZE
-        time.sleep(0.4)
-
-    # --- DONE (only 200 latest)
-    print("\n📡 Fetching last 200 DONE requests...")
-    done_requests, done_count = fetch_requests(headers, "DONE", DONE_LIMIT)
-    for r in done_requests:
-        upsert(conn, r)
-    print(f" ✅ DONE processed: {len(done_requests)} (of total {done_count})")
-
-    conn.close()
-    print("\n✅ Sync completed successfully.\n")
-
-
-if __name__ == "__main__":
-    main()
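One detail worth noting in the removed script: the ACTIVE loop increments a local `offset`, but `fetch_requests` always sends `"pageInfo": {"first": ..., "offset": 0}`, so every iteration re-requests the first page and the loop only stops once a short batch happens to come back. If this sync is ever resurrected, a sketch of offset-aware paging, assuming the API honors `pageInfo.offset` as the old query declares it (CLINIC_SLUG, BATCH_SIZE, GRAPHQL_QUERY and upsert are the ones defined in the removed script above):

import requests

def fetch_requests_page(headers, state, first, offset):
    """Fetch one page of requests for a given state, starting at `offset`."""
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": first, "offset": offset},
        "locale": "cs",
        "state": state,
    }
    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
    r.raise_for_status()
    data = r.json().get("data", {}).get("requestsResponse", {})
    return data.get("patientRequests", []), data.get("count", 0)

# Usage: walk ACTIVE requests page by page, advancing the offset each round.
#   offset = 0
#   while True:
#       batch, _count = fetch_requests_page(headers, "ACTIVE", BATCH_SIZE, offset)
#       if not batch:
#           break
#       for req in batch:
#           upsert(conn, req)
#       if len(batch) < BATCH_SIZE:
#           break
#       offset += BATCH_SIZE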
@@ -12,7 +12,7 @@ from datetime import datetime
 # ⚙️ CONFIGURATION
 # ==============================
 DB_CONFIG = {
-    "host": "127.0.0.1",
+    "host": "192.168.1.76",
     "port": 3307,
     "user": "root",
     "password": "Vlado9674+",
@@ -20,7 +20,7 @@ DB_CONFIG = {
     "charset": "utf8mb4",
 }
 
-BASE_DIR = Path(r"z:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
+BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
 BASE_DIR.mkdir(parents=True, exist_ok=True)
 
 
113  20SaveDownloads/20 SaveToFileSystem.py  Normal file
@@ -0,0 +1,113 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import zlib
import pymysql
import re
from pathlib import Path
from datetime import datetime

# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}

BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)


def sanitize_name(name: str) -> str:
    """Replace invalid filename characters with underscore."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()


# ==============================
# 📦 EXPORT WITH JOIN TO POZADAVKY
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()

# 🎯 JOIN medevio_downloads → pozadavky
cur_meta.execute("""
    SELECT d.id, d.request_id, d.attachment_id, d.filename,
           d.created_at, d.downloaded_at,
           p.pacient_jmeno AS jmeno,
           p.pacient_prijmeni AS prijmeni
    FROM medevio_downloads d
    JOIN pozadavky p ON d.request_id = p.id
    WHERE d.file_content IS NOT NULL;
""")

rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} records to check/export")

skipped, exported = 0, 0

for r in rows:
    try:
        created = r["created_at"] or r["downloaded_at"] or datetime.now()
        date_str = created.strftime("%Y-%m-%d")

        # 👍 Now always correct from pozadavky
        prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
        jmeno = sanitize_name(r["jmeno"] or "")

        # 🔥 Full request_id for folder identification
        full_req_id = sanitize_name(r["request_id"])

        # Folder names (normal and triangle)
        base_folder = f"{date_str} {prijmeni}, {jmeno} {full_req_id}"
        tri_folder = f"{date_str}▲ {prijmeni}, {jmeno} {full_req_id}"

        base_folder = sanitize_name(base_folder)
        tri_folder = sanitize_name(tri_folder)

        base_path = BASE_DIR / base_folder
        tri_path = BASE_DIR / tri_folder

        filename = sanitize_name(r["filename"] or f"unknown_{r['id']}.bin")
        file_path_base = base_path / filename
        file_path_tri = tri_path / filename

        # 🟡 Skip if file already exists
        if file_path_base.exists() or file_path_tri.exists():
            skipped += 1
            found_in = "▲" if file_path_tri.exists() else ""
            print(f"⏭️ Skipping existing{found_in}: {filename}")
            continue

        # Ensure directory exists
        base_path.mkdir(parents=True, exist_ok=True)

        # 2️⃣ Fetch blob content
        cur_blob.execute(
            "SELECT file_content FROM medevio_downloads WHERE id = %s",
            (r["id"],)
        )
        blob = cur_blob.fetchone()[0]

        if blob:
            with open(file_path_base, "wb") as f:
                f.write(blob)
            exported += 1
            print(f"✅ Saved: {file_path_base.relative_to(BASE_DIR)}")
        else:
            print(f"⚠️ No content for id={r['id']}")

    except Exception as e:
        print(f"❌ Error for id={r['id']}: {e}")

cur_blob.close()
cur_meta.close()
conn.close()

print(f"\n🎯 Export complete — {exported} new files saved, {skipped} skipped.\n")
105  Testy/03 Test.py  Normal file
@@ -0,0 +1,105 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import zlib
import pymysql
import re
from pathlib import Path
from datetime import datetime

# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}

BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP1")
BASE_DIR.mkdir(parents=True, exist_ok=True)


def sanitize_name(name: str) -> str:
    """Replace invalid filename characters with underscore."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()


# ==============================
# 📦 STREAMING EXPORT WITH TRIANGLE CHECK
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()

cur_meta.execute("""
    SELECT id, request_id, attachment_id, filename, pacient_jmeno,
           pacient_prijmeni, created_at, downloaded_at
    FROM medevio_downloads
    WHERE file_content IS NOT NULL;
""")

rows = cur_meta.fetchall()
print(f"📋 Found {len(rows)} records to check/export")

skipped, exported = 0, 0

for r in rows:
    try:
        created = r["created_at"] or r["downloaded_at"] or datetime.now()
        date_str = created.strftime("%Y-%m-%d")

        prijmeni = sanitize_name(r["pacient_prijmeni"] or "Unknown")
        jmeno = sanitize_name(r["pacient_jmeno"] or "")

        # 🔥 NEW: use full request_id instead of CRC32
        full_req_id = sanitize_name(r["request_id"])

        # Base (non-triangle) and processed (triangle) folder variants
        base_folder = f"{date_str} {prijmeni}, {jmeno} {full_req_id}"
        tri_folder = f"{date_str}▲ {prijmeni}, {jmeno} {full_req_id}"

        base_folder = sanitize_name(base_folder)
        tri_folder = sanitize_name(tri_folder)

        base_path = BASE_DIR / base_folder
        tri_path = BASE_DIR / tri_folder

        filename = sanitize_name(r["filename"] or f"unknown_{r['id']}.bin")
        file_path_base = base_path / filename
        file_path_tri = tri_path / filename

        # 🟡 Skip if exists in either version
        if file_path_base.exists() or file_path_tri.exists():
            skipped += 1
            found_in = "▲" if file_path_tri.exists() else ""
            print(f"⏭️ Skipping existing{found_in}: {filename}")
            continue

        # Make sure base folder exists before saving
        base_path.mkdir(parents=True, exist_ok=True)

        # 2️⃣ Fetch blob
        cur_blob.execute("SELECT file_content FROM medevio_downloads WHERE id = %s", (r["id"],))
        blob = cur_blob.fetchone()[0]

        if blob:
            with open(file_path_base, "wb") as f:
                f.write(blob)
            exported += 1
            print(f"✅ Saved: {file_path_base.relative_to(BASE_DIR)}")
        else:
            print(f"⚠️ No content for id={r['id']}")

    except Exception as e:
        print(f"❌ Error for id={r['id']}: {e}")

cur_blob.close()
cur_meta.close()
conn.close()

print(f"\n🎯 Export complete — {exported} new files saved, {skipped} skipped.\n")
216  Testy/04 Test.py  Normal file
@@ -0,0 +1,216 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Download all attachments for pozadavky where attachmentsProcessed IS NULL
and (optionally) createdAt is newer than a cutoff date.
Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed.
"""

import zlib
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time

# ==============================
# 🔧 CONFIGURATION
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"

DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,
}

CREATED_AFTER = "2024-12-01"  # optional filter

GRAPHQL_QUERY = r"""
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
  patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
    attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
    patientRequestId: $requestId
    pageInfo: {first: 100, offset: 0}
  ) {
    attachmentType
    id
    medicalRecord {
      contentType
      description
      downloadUrl
      id
      url
      visibleToPatient
    }
  }
}
"""

# ==============================
# 🧮 HELPERS
# ==============================
def extract_filename_from_url(url: str) -> str:
    try:
        return url.split("/")[-1].split("?")[0]
    except:
        return "unknown_filename"

def read_token(p: Path) -> str:
    tok = p.read_text(encoding="utf-8").strip()
    return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok

# ==============================
# 📡 FETCH ATTACHMENTS
# ==============================
def fetch_attachments(headers, request_id):
    payload = {
        "operationName": "ClinicRequestDetail_GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {"requestId": request_id},
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code} for request {request_id}")
        return []
    return r.json().get("data", {}).get("patientRequestMedicalRecords", [])


# ==============================
# 💾 SAVE TO MYSQL (clean version)
# ==============================
def insert_download(cur, req_id, a, m, created_date, existing_ids):

    attachment_id = a.get("id")
    if attachment_id in existing_ids:
        print(f" ⏭️ Already downloaded {attachment_id}")
        return False

    url = m.get("downloadUrl")
    if not url:
        print(" ⚠️ Missing download URL")
        return False

    filename = extract_filename_from_url(url)

    # Download file
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        content = r.content
    except Exception as e:
        print(f" ⚠️ Download failed {url}: {e}")
        return False

    file_size = len(content)
    attachment_type = a.get("attachmentType")
    content_type = m.get("contentType")

    # 🚨 CLEAN INSERT — no patient_jmeno/no patient_prijmeni
    cur.execute("""
        INSERT INTO medevio_downloads (
            request_id, attachment_id, attachment_type,
            filename, content_type, file_size,
            created_at, file_content
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            file_content = VALUES(file_content),
            file_size = VALUES(file_size),
            downloaded_at = NOW()
    """, (
        req_id,
        attachment_id,
        attachment_type,
        filename,
        content_type,
        file_size,
        created_date,
        content,
    ))

    existing_ids.add(attachment_id)
    print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
    return True


# ==============================
# 🧠 MAIN
# ==============================
def main():
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    # Load existing IDs
    with conn.cursor() as cur:
        cur.execute("SELECT attachment_id FROM medevio_downloads")
        existing_ids = {row["attachment_id"] for row in cur.fetchall()}
    print(f"✅ {len(existing_ids)} attachments already saved.")

    # Build query for pozadavky
    sql = """
        SELECT id, pacient_prijmeni, pacient_jmeno, createdAt
        FROM pozadavky
        WHERE attachmentsProcessed IS NULL
    """
    params = []
    if CREATED_AFTER:
        sql += " AND createdAt >= %s"
        params.append(CREATED_AFTER)

    with conn.cursor() as cur:
        cur.execute(sql, params)
        req_rows = cur.fetchall()

    print(f"📋 Found {len(req_rows)} pozadavky to process.")

    # Process each pozadavek
    for i, row in enumerate(req_rows, 1):
        req_id = row["id"]
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        jmeno = row.get("pacient_jmeno") or ""
        created_date = row.get("createdAt") or datetime.now()

        print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")

        attachments = fetch_attachments(headers, req_id)

        if not attachments:
            print(" ⚠️ No attachments found")
            with conn.cursor() as cur:
                cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
            conn.commit()
            continue

        with conn.cursor() as cur:
            for a in attachments:
                m = a.get("medicalRecord") or {}
                insert_download(cur, req_id, a, m, created_date, existing_ids)
        conn.commit()

        # Mark processed
        with conn.cursor() as cur:
            cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
        conn.commit()

        print(f" ✅ Done ({len(attachments)} attachments)")

        time.sleep(0.3)

    conn.close()
    print("\n🎯 All attachments processed.")

# ==============================
if __name__ == "__main__":
    main()
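For reference, the shape fetch_attachments returns and insert_download consumes, as implied by the GraphQL selection above (the values below are made up; only the keys follow the query):

sample_attachment = {
    "attachmentType": "PATIENT_REQUEST_ATTACHMENT",
    "id": "hypothetical-attachment-uuid",
    "medicalRecord": {
        "contentType": "application/pdf",
        "description": None,
        "downloadUrl": "https://example-storage/file.pdf?signature=...",  # signed, expiring URL
        "id": "hypothetical-record-uuid",
        "url": "https://example-storage/file.pdf",
        "visibleToPatient": True,
    },
}

# insert_download() reads a["id"], a["attachmentType"], m["downloadUrl"] and
# m["contentType"]; the other medicalRecord fields are fetched but currently ignored.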