notebookvb
This commit is contained in:
@@ -0,0 +1,203 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Fetch Medevio pending (ACTIVE) patient requests and return a pandas DataFrame.
|
||||
Reads Bearer token from token.txt (single line, token only).
|
||||
"""
|
||||
|
||||
import requests
|
||||
import pandas as pd
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any
|
||||
|
||||
# CONFIG ---------------------------------------------------------------------
|
||||
TOKEN_FILE = str(Path(__file__).resolve().parent.parent / "token.txt") # centralized token
|
||||
GRAPHQL_URL = "https://app.medevio.cz/graphql"
|
||||
CLINIC_SLUG = "mudr-buzalkova" # adjust if needed
|
||||
LOCALE = "cs"
|
||||
PAGE_SIZE = 50 # how many items to request per page
|
||||
REQUEST_WAIT = 0.2 # seconds between requests to be polite
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicLegacyRequestList_ListPatientRequestsForClinic(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$state: PatientRequestState,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requests: listPatientRequestsForClinic(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
state: $state,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
id
|
||||
createdAt
|
||||
dueDate
|
||||
displayTitle(locale: $locale)
|
||||
doneAt
|
||||
removedAt
|
||||
priority
|
||||
evaluationResult(locale: $locale) {
|
||||
fields {
|
||||
name
|
||||
value
|
||||
}
|
||||
}
|
||||
clinicId
|
||||
extendedPatient {
|
||||
id
|
||||
identificationNumber
|
||||
kind
|
||||
name
|
||||
note
|
||||
owner { name surname }
|
||||
key
|
||||
status
|
||||
surname
|
||||
type
|
||||
user { id name surname }
|
||||
isUnknownPatient
|
||||
}
|
||||
lastMessage {
|
||||
createdAt
|
||||
id
|
||||
readAt
|
||||
sender { id name surname clinicId }
|
||||
text
|
||||
}
|
||||
queue { id name }
|
||||
reservations { id canceledAt done start }
|
||||
tags(onlyImportant: true) { id name color icon }
|
||||
priceWhenCreated
|
||||
currencyWhenCreated
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def read_token(path: str) -> str:
    """Load the API token from *path*, dropping any leading "Bearer " prefix."""
    with open(path, "r", encoding="utf-8") as handle:
        raw = handle.read().strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
def fetch_requests(token: str,
                   clinic_slug: str = CLINIC_SLUG,
                   locale: str = LOCALE,
                   page_size: int = PAGE_SIZE) -> List[Dict[str, Any]]:
    """Page through all ACTIVE patient requests for a clinic.

    Args:
        token: Bearer token (without the "Bearer " prefix).
        clinic_slug: Clinic identifier used by the Medevio API.
        locale: Locale passed to localized GraphQL fields.
        page_size: Number of items requested per page.

    Returns:
        Every item from the aliased ``requests`` field, concatenated across pages.

    Raises:
        RuntimeError: When the GraphQL response carries an ``errors`` list.
        requests.HTTPError: On non-2xx HTTP responses.
    """
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    all_items: List[Dict[str, Any]] = []
    offset = 0

    while True:
        variables = {
            "clinicSlug": clinic_slug,
            "queueId": None,
            "queueAssignment": "ANY",
            "state": "ACTIVE",
            "pageInfo": {"first": page_size, "offset": offset},
            "locale": locale,
        }
        payload = {
            "query": GRAPHQL_QUERY,
            "variables": variables,
            "operationName": "ClinicLegacyRequestList_ListPatientRequestsForClinic",
        }

        r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=30)
        r.raise_for_status()
        js = r.json()

        # Basic error handling
        if "errors" in js:
            raise RuntimeError(f"GraphQL returned errors: {js['errors']}")

        # GraphQL may return "data": null on failures; guard before .get().
        items = (js.get("data") or {}).get("requests", [])
        if not items:
            break

        all_items.extend(items)

        # If fewer than requested, we are at the end
        if len(items) < page_size:
            break

        offset += page_size
        time.sleep(REQUEST_WAIT)  # be polite to the API between pages

    return all_items
|
||||
|
||||
def flatten_item(item: Dict[str, Any]) -> Dict[str, Any]:
    """Flatten one nested GraphQL request item into a single-level dict."""
    patient = item.get("extendedPatient") or {}
    message = item.get("lastMessage") or {}
    queue_info = item.get("queue") or {}
    # evaluationResult.fields is a list of {name, value} pairs (may be absent).
    eval_fields = (item.get("evaluationResult") or {}).get("fields") or []

    row = {
        "id": item.get("id"),
        "createdAt": item.get("createdAt"),
        "dueDate": item.get("dueDate"),
        "displayTitle": item.get("displayTitle"),
        "doneAt": item.get("doneAt"),
        "removedAt": item.get("removedAt"),
        "priority": item.get("priority"),
        "clinicId": item.get("clinicId"),
        "patient_id": patient.get("id"),
        "patient_identificationNumber": patient.get("identificationNumber"),
        "patient_name": patient.get("name"),
        "patient_surname": patient.get("surname"),
        "patient_status": patient.get("status"),
        "lastMessage_id": message.get("id"),
        "lastMessage_createdAt": message.get("createdAt"),
        "lastMessage_text": message.get("text"),
        "queue_id": queue_info.get("id"),
        "queue_name": queue_info.get("name"),
        "priceWhenCreated": item.get("priceWhenCreated"),
        "currencyWhenCreated": item.get("currencyWhenCreated"),
    }

    # Merge evaluation-form answers under an "eval_" prefix (named fields only).
    row.update({f"eval_{fld['name']}": fld.get("value")
                for fld in eval_fields if fld.get("name")})
    return row
|
||||
|
||||
def to_dataframe(items: List[Dict[str, Any]]) -> pd.DataFrame:
    """Build a DataFrame of flattened items with timestamp columns parsed."""
    df = pd.DataFrame([flatten_item(entry) for entry in items])
    timestamp_columns = ("createdAt", "dueDate", "doneAt", "lastMessage_createdAt", "removedAt")
    for column in timestamp_columns:
        if column in df.columns:
            # Unparseable values become NaT instead of raising.
            df[column] = pd.to_datetime(df[column], errors="coerce")
    return df
|
||||
|
||||
def main():
    """Fetch ACTIVE requests, preview them on the console, export to Excel."""
    token = read_token(TOKEN_FILE)
    print("Fetching pending (ACTIVE) requests from Medevio...")
    records = fetch_requests(token)
    print(f"Fetched {len(records)} items.")

    df = to_dataframe(records)
    # Widen the console preview before printing.
    pd.set_option("display.max_rows", 20)
    pd.set_option("display.max_colwidth", 160)
    print(df.head(50))

    # Persist the full result next to the script.
    df.to_excel("medevio_pending_requests.xlsx", index=False)
    print("Saved medevio_pending_requests.xlsx")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,104 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
import requests # 👈 this is new
|
||||
|
||||
# --- Settings ----------------------------------------------------
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
SHOW_FULL_TOKEN = False # set True if you want to print the full token
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicLegacyRequestList_ListPatientRequestsForClinic(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$state: PatientRequestState,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requests: listPatientRequestsForClinic(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
state: $state,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
id
|
||||
createdAt
|
||||
dueDate
|
||||
displayTitle(locale: $locale)
|
||||
doneAt
|
||||
removedAt
|
||||
priority
|
||||
evaluationResult(locale: $locale) { fields { name value } }
|
||||
clinicId
|
||||
extendedPatient {
|
||||
id
|
||||
identificationNumber
|
||||
kind
|
||||
name
|
||||
surname
|
||||
status
|
||||
isUnknownPatient
|
||||
}
|
||||
lastMessage { id text createdAt }
|
||||
queue { id name }
|
||||
reservations { id canceledAt done start }
|
||||
tags(onlyImportant: true) { id name color icon }
|
||||
priceWhenCreated
|
||||
currencyWhenCreated
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
def main():
    """Run one ACTIVE-requests query and dump the raw JSON response."""
    token = read_token(TOKEN_PATH)

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "state": "ACTIVE",  # pending requests
        "pageInfo": {"first": 30, "offset": 0},
        "locale": "cs",
    }

    payload = {
        "operationName": "ClinicLegacyRequestList_ListPatientRequestsForClinic",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    # === Actually call Medevio API ==================================
    print("📡 Querying Medevio GraphQL API...\n")
    url = "https://api.medevio.cz/graphql"
    # timeout so a stalled connection cannot hang the script forever
    r = requests.post(url, json=payload, headers=headers, timeout=30)
    print(f"HTTP status: {r.status_code}\n")

    # --- Try to decode JSON
    try:
        data = r.json()
        print("=== Raw JSON response ===")
        print(json.dumps(data, indent=2, ensure_ascii=False))
    except Exception as e:
        print("❌ Failed to decode JSON:", e)
        print("Raw text:\n", r.text)


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,96 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
import requests
|
||||
|
||||
# --- Settings ----------------------------------------------------
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicLegacyRequestList_ListPatientRequestsForClinic(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$state: PatientRequestState,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requests: listPatientRequestsForClinic(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
state: $state,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
def main():
    """List ACTIVE requests and print a one-line summary per patient."""
    token = read_token(TOKEN_PATH)

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "state": "ACTIVE",  # pending requests
        "pageInfo": {"first": 30, "offset": 0},
        "locale": "cs",
    }

    payload = {
        "operationName": "ClinicLegacyRequestList_ListPatientRequestsForClinic",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    url = "https://api.medevio.cz/graphql"
    print("📡 Querying Medevio GraphQL API...\n")
    # timeout so a stalled connection cannot hang the script forever
    r = requests.post(url, json=payload, headers=headers, timeout=30)
    print(f"HTTP status: {r.status_code}\n")

    # --- Parse JSON safely
    try:
        data = r.json()
    except Exception as e:
        print("❌ Failed to decode JSON:", e)
        print("Raw text:\n", r.text)
        return

    # "data" may be present but null on GraphQL errors; guard before .get().
    requests_data = (data.get("data") or {}).get("requests", [])
    if not requests_data:
        print("⚠️ No requests found or invalid response.")
        return

    print(f"📋 Found {len(requests_data)} active requests:\n")
    for req in requests_data:
        patient = req.get("extendedPatient", {})
        print(f"- {patient.get('surname','')} {patient.get('name','')} "
              f"({patient.get('identificationNumber','')}) "
              f"→ {req.get('displayTitle','')} [ID: {req.get('id')}]")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,101 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
import requests
|
||||
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
# --- Try including `updatedAt` field directly ---
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestGrid_ListPatientRequestsForClinic2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
createdAt
|
||||
updatedAt # 👈 TESTUJEME, jestli Medevio toto pole podporuje
|
||||
doneAt
|
||||
removedAt
|
||||
displayTitle(locale: $locale)
|
||||
lastMessage { createdAt }
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
|
||||
def main():
    """Probe whether `updatedAt` is accepted by listPatientRequestsForClinic2."""
    token = read_token(TOKEN_PATH)

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": 3, "offset": 0},
        "locale": "cs",
    }

    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    url = "https://api.medevio.cz/graphql"
    print("📡 Querying Medevio GraphQL API (testing `updatedAt` field)...\n")

    # timeout so a stalled connection cannot hang the script forever
    r = requests.post(url, json=payload, headers=headers, timeout=30)
    print(f"HTTP status: {r.status_code}\n")

    try:
        data = r.json()
    except Exception as e:
        print("❌ Failed to parse JSON:", e)
        print("Raw text:\n", r.text)
        return

    print("=== JSON response ===")
    print(json.dumps(data, indent=2, ensure_ascii=False))

    # Quick check: did it return an error message about updatedAt?
    errors = data.get("errors")
    if errors:
        print("\n⚠️ Medevio returned GraphQL error:")
        for e in errors:
            print(f" → {e.get('message')}")
    else:
        print("\n✅ No errors, `updatedAt` might exist in schema!")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
import requests
|
||||
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestGrid_ListPatientRequestsForClinic2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug
|
||||
queueId: $queueId
|
||||
queueAssignment: $queueAssignment
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
createdAt
|
||||
doneAt
|
||||
displayTitle(locale: $locale)
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
def main():
    """Call listPatientRequestsForClinic2 without a state filter and dump JSON."""
    token = read_token(TOKEN_PATH)

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    # state argument intentionally omitted from the variables
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": 10, "offset": 0},
        "locale": "cs",
    }

    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    url = "https://api.medevio.cz/graphql"
    print("📡 Querying Medevio GraphQL API (no state argument)...\n")
    # timeout so a stalled connection cannot hang the script forever
    r = requests.post(url, json=payload, headers=headers, timeout=30)
    print(f"HTTP status: {r.status_code}\n")

    try:
        data = r.json()
        print("=== JSON response ===")
        print(json.dumps(data, indent=2, ensure_ascii=False))
    except Exception as e:
        print("❌ Failed to parse JSON:", e)
        print("Raw text:\n", r.text)


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,182 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
import time
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# ================================
|
||||
# 🔧 CONFIGURATION
|
||||
# ================================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
BATCH_SIZE = 100
|
||||
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestGrid_ListPatientRequestsForClinic2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!,
|
||||
$state: PatientRequestState
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
pageInfo: $pageInfo,
|
||||
state: $state
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
doneAt
|
||||
removedAt
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ================================
|
||||
# 🔑 TOKEN
|
||||
# ================================
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
# ================================
|
||||
# 🕒 DATETIME CONVERSION
|
||||
# ================================
|
||||
def to_mysql_dt(iso_str):
    """Convert an ISO 8601 timestamp (with trailing Z) to MySQL DATETIME text.

    Returns None for empty input or anything that fails to parse.
    """
    if not iso_str:
        return None
    try:
        # fromisoformat cannot handle "Z"; rewrite it as an explicit UTC offset.
        parsed = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
        return parsed.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return None
|
||||
|
||||
# ================================
|
||||
# 💾 UPSERT TO MYSQL
|
||||
# ================================
|
||||
def upsert(conn, r):
    """Insert or update one request row in the `pozadavky` table.

    Args:
        conn: Open pymysql connection; committed once per call.
        r: One `patientRequests` item from the GraphQL response.
    """
    # Patient sub-object may be missing or null; fall back to an empty dict.
    p = (r.get("extendedPatient") or {})
    # Relies on a unique/primary key on `id` (presumably the PK — confirm schema).
    # createdAt is intentionally not refreshed on duplicate-key update.
    sql = """
    INSERT INTO pozadavky (
        id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
        pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
    ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
        displayTitle=VALUES(displayTitle),
        updatedAt=VALUES(updatedAt),
        doneAt=VALUES(doneAt),
        removedAt=VALUES(removedAt),
        pacient_jmeno=VALUES(pacient_jmeno),
        pacient_prijmeni=VALUES(pacient_prijmeni),
        pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """
    vals = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        to_mysql_dt(r.get("updatedAt")),
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )
    # One execute + commit per row.
    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()
|
||||
|
||||
# ================================
|
||||
# 📡 FETCH ONE BATCH
|
||||
# ================================
|
||||
def fetch_batch(headers, state, offset):
    """Fetch one page of patient requests for *state* starting at *offset*.

    Args:
        headers: HTTP headers including the Authorization bearer token.
        state: PatientRequestState value (e.g. "ACTIVE", "DONE").
        offset: Zero-based pagination offset.

    Returns:
        Tuple ``(items, total)``: the page of ``patientRequests`` and the
        server-reported total ``count`` for this state.

    Raises:
        requests.HTTPError: On non-2xx HTTP responses.
    """
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": state,
    }
    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    # timeout so a dead connection cannot stall the sync loop forever
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
    r.raise_for_status()
    # "data"/"requestsResponse" may be null on GraphQL errors; treat as empty.
    body = r.json().get("data") or {}
    data = body.get("requestsResponse") or {}
    return data.get("patientRequests", []), data.get("count", 0)
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def main():
    """Sync all requests for each state in STATES from Medevio into MySQL."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    total_downloaded = 0
    total_upserted = 0

    for state in STATES:
        print(f"\n📡 STATE = {state}")
        offset = 0
        state_total = None  # server-reported total, learned from the first page
        while True:
            batch, count_total = fetch_batch(headers, state, offset)
            if state_total is None:
                state_total = count_total
                print(f"  • Total from server: {state_total}")
            if not batch:
                break
            print(f"  • Offset {offset:>5}: got {len(batch)}")
            # Row-by-row upsert; each call commits individually.
            for r in batch:
                upsert(conn, r)
                total_upserted += 1
            total_downloaded += len(batch)
            offset += BATCH_SIZE
            # Stop once the server-reported total has been paged through.
            if offset >= state_total:
                break
            time.sleep(0.4)  # respect API

    conn.close()
    print(f"\n✅ Done. Downloaded {total_downloaded} items, upserted {total_upserted} rows (states: {', '.join(STATES)}).")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,224 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
import time
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# ================================
|
||||
# 🔧 CONFIGURATION
|
||||
# ================================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
BATCH_SIZE = 1000
|
||||
STATES = ["ACTIVE", "DONE"] # optionally add "REMOVED"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestGrid_ListPatientRequestsForClinic2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!,
|
||||
$state: PatientRequestState
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
pageInfo: $pageInfo,
|
||||
state: $state
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
doneAt
|
||||
removedAt
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ================================
|
||||
# 🔑 TOKEN
|
||||
# ================================
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
# ================================
|
||||
# 🕒 DATETIME CONVERSION
|
||||
# ================================
|
||||
def to_mysql_dt(iso_str):
    """Convert an ISO 8601 timestamp (with trailing Z) to MySQL DATETIME text.

    Returns None for empty input or anything that fails to parse.
    """
    if not iso_str:
        return None
    try:
        # fromisoformat cannot handle "Z"; rewrite it as an explicit UTC offset.
        parsed = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
        return parsed.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return None
|
||||
|
||||
|
||||
|
||||
|
||||
# ================================
|
||||
# 💾 UPSERT TO MYSQL
|
||||
# ================================
|
||||
def upsert_many(conn, batch):
    """Upsert multiple records in one commit.

    Args:
        conn: Open pymysql connection; committed once for the whole batch.
        batch: List of `patientRequests` items; no-op when empty.
    """
    if not batch:
        return
    # Relies on a unique/primary key on `id` (presumably the PK — confirm schema).
    # createdAt is intentionally not refreshed on duplicate-key update.
    sql = """
    INSERT INTO pozadavky (
        id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
        pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
    ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
        displayTitle=VALUES(displayTitle),
        updatedAt=VALUES(updatedAt),
        doneAt=VALUES(doneAt),
        removedAt=VALUES(removedAt),
        pacient_jmeno=VALUES(pacient_jmeno),
        pacient_prijmeni=VALUES(pacient_prijmeni),
        pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """
    vals = []
    for r in batch:
        # Patient sub-object may be missing or null; fall back to an empty dict.
        p = (r.get("extendedPatient") or {})
        vals.append((
            r.get("id"),
            r.get("displayTitle"),
            to_mysql_dt(r.get("createdAt")),
            to_mysql_dt(r.get("updatedAt")),
            to_mysql_dt(r.get("doneAt")),
            to_mysql_dt(r.get("removedAt")),
            p.get("name"),
            p.get("surname"),
            p.get("identificationNumber"),
        ))

    # executemany batches the inserts; a single commit covers the whole batch.
    with conn.cursor() as cur:
        cur.executemany(sql, vals)
    conn.commit()
|
||||
|
||||
def upsert(conn, r):
    """Insert or update one request row in the `pozadavky` table.

    NOTE(review): single-row variant kept alongside upsert_many; this script's
    main loop appears to use the batched path — confirm whether this is still needed.

    Args:
        conn: Open pymysql connection; committed once per call.
        r: One `patientRequests` item from the GraphQL response.
    """
    # Patient sub-object may be missing or null; fall back to an empty dict.
    p = (r.get("extendedPatient") or {})
    # Relies on a unique/primary key on `id` (presumably the PK — confirm schema).
    sql = """
    INSERT INTO pozadavky (
        id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
        pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
    ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
    ON DUPLICATE KEY UPDATE
        displayTitle=VALUES(displayTitle),
        updatedAt=VALUES(updatedAt),
        doneAt=VALUES(doneAt),
        removedAt=VALUES(removedAt),
        pacient_jmeno=VALUES(pacient_jmeno),
        pacient_prijmeni=VALUES(pacient_prijmeni),
        pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """
    vals = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        to_mysql_dt(r.get("updatedAt")),
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )
    # One execute + commit per row.
    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()
|
||||
|
||||
# ================================
|
||||
# 📡 FETCH ONE BATCH
|
||||
# ================================
|
||||
def fetch_batch(headers, state, offset):
    """Fetch one page of patient requests for *state* starting at *offset*.

    Args:
        headers: HTTP headers including the Authorization bearer token.
        state: PatientRequestState value (e.g. "ACTIVE", "DONE").
        offset: Zero-based pagination offset.

    Returns:
        Tuple ``(items, total)``: the page of ``patientRequests`` and the
        server-reported total ``count`` for this state.

    Raises:
        requests.HTTPError: On non-2xx HTTP responses.
    """
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": state,
    }
    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }
    # timeout so a dead connection cannot stall the sync loop forever
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
    r.raise_for_status()
    # "data"/"requestsResponse" may be null on GraphQL errors; treat as empty.
    body = r.json().get("data") or {}
    data = body.get("requestsResponse") or {}
    return data.get("patientRequests", []), data.get("count", 0)
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def main():
    """Sync all requests for each state in STATES into MySQL using batched upserts."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    total_downloaded = 0
    total_upserted = 0

    for state in STATES:
        print(f"\n📡 STATE = {state}")
        offset = 0
        state_total = None  # server-reported total, learned from the first page
        while True:
            batch, count_total = fetch_batch(headers, state, offset)
            if state_total is None:
                state_total = count_total
                print(f"  • Total from server: {state_total}")
            if not batch:
                break
            print(f"  • Offset {offset:>5}: got {len(batch)}")

            # Perform one efficient upsert for the entire batch
            upsert_many(conn, batch)

            total_upserted += len(batch)
            total_downloaded += len(batch)
            offset += BATCH_SIZE
            # Stop once the server-reported total has been paged through.
            if offset >= state_total:
                break
            time.sleep(10)  # respect API

    conn.close()
    print(f"\n✅ Done. Downloaded {total_downloaded} items, upserted {total_upserted} rows (states: {', '.join(STATES)}).")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,92 @@
|
||||
import requests
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
# === Nastavení ===
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
|
||||
OUTPUT_DIR = Path(r"u:\Dropbox\!!!Days\Downloads Z230\Medevio_přílohy")
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the bearer token from *p*, stripping an optional "Bearer " prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2(
|
||||
$requestId: UUID!,
|
||||
$isDoctor: Boolean!
|
||||
) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient @include(if: $isDoctor)
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# --- Build the GraphQL request -----------------------------------
variables = {
    "isDoctor": True,
    "requestId": REQUEST_ID,
}

headers = {
    "Authorization": f"Bearer {read_token(TOKEN_PATH)}",
    "Content-Type": "application/json",
    "Accept": "application/json",
}

payload = {
    "operationName": "ClinicRequestDetail_GetPatientRequest2",
    "query": GRAPHQL_QUERY,
    "variables": variables,
}

print("📡 Querying Medevio API for attachments...\n")
# timeout so a stalled connection cannot hang the script forever
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
print(f"HTTP status: {r.status_code}\n")

data = r.json()
# "data" may be null on GraphQL errors; treat that as "no attachments".
records = (data.get("data") or {}).get("patientRequestMedicalRecords", [])
if not records:
    print("⚠️ No attachments found.")
    # raise SystemExit instead of exit(): exit() comes from site.py and
    # is not guaranteed to exist in every interpreter configuration.
    raise SystemExit(0)

# === Save attachments ===
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
print(f"📂 Saving {len(records)} attachments to: {OUTPUT_DIR}\n")

for rec in records:
    med = rec.get("medicalRecord", {})
    url = med.get("downloadUrl")
    name = med.get("description", med.get("id")) or "unknown.pdf"

    if not url:
        print(f"❌ Skipped {name} (no download URL)")
        continue

    # Descriptions may contain path separators; neutralise them for the filesystem.
    safe_name = name.replace("/", "_").replace("\\", "_")
    out_path = OUTPUT_DIR / safe_name

    print(f"⬇️ Downloading: {safe_name}")
    try:
        file_data = requests.get(url, timeout=30)
        file_data.raise_for_status()
        out_path.write_bytes(file_data.content)
        print(f"✅ Saved: {out_path.name} ({len(file_data.content)/1024:.1f} KB)")
    except Exception as e:
        print(f"❌ Error saving {safe_name}: {e}")

print("\n🎉 Done!")
|
||||
@@ -0,0 +1,59 @@
|
||||
import requests
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
REQUEST_ID = "092a0c63-28be-4c6b-ab3b-204e1e2641d4"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the API token from *p*, stripping an optional 'Bearer ' prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
# GraphQL query listing all attachment records for one patient request.
GRAPHQL_QUERY = r"""
query ClinicRequestDetail_GetPatientRequest2(
    $requestId: UUID!,

) {
  patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
    attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
    patientRequestId: $requestId
    pageInfo: {first: 100, offset: 0}
  ) {
    attachmentType
    id
    medicalRecord {
      contentType
      description
      downloadUrl
      id
      url
      visibleToPatient
    }
  }
}
"""


variables = {
    "requestId": REQUEST_ID,
}

headers = {
    "Authorization": f"Bearer {read_token(TOKEN_PATH)}",
    "Content-Type": "application/json",
    "Accept": "application/json",
}

payload = {
    "operationName": "ClinicRequestDetail_GetPatientRequest2",
    "query": GRAPHQL_QUERY,
    "variables": variables,
}

# Debug helper: dump the raw GraphQL response for one hard-coded request id.
print("📡 Querying Medevio API...\n")
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
print(f"HTTP status: {r.status_code}\n")
print(json.dumps(r.json(), indent=2, ensure_ascii=False))
|
||||
@@ -0,0 +1,204 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os,zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import shutil
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
# Token file lives one level above this script's folder.
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"
# Destination root for downloaded attachments (Dropbox-synced folder).
BASE_DIR = Path(r"u:\Dropbox\ordinace\Dokumentace_ke_zpracování\Medevio_přílohy")
BASE_DIR.mkdir(parents=True, exist_ok=True)

# NOTE(review): database credentials (root password) are hard-coded here and
# repeated across sibling scripts — move them to environment variables or a
# shared config outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2(
|
||||
$requestId: UUID!,
|
||||
) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def short_crc8(uuid_str: str) -> str:
    """Return deterministic 8-char hex string from any input string (CRC32)."""
    checksum = zlib.crc32(uuid_str.encode("utf-8")) & 0xFFFFFFFF
    return format(checksum, "08x")
|
||||
|
||||
def extract_filename_from_url(url: str) -> str:
    """Extracts filename from S3-style URL (between last '/' and first '?')."""
    try:
        tail = url.rsplit("/", 1)[-1]
        return tail.partition("?")[0]
    except Exception:
        # e.g. url is None — keep a usable placeholder name
        return "unknown_filename"
|
||||
|
||||
def safe_rename(src: Path, dst: Path, retries: int = 5, delay: float = 3.0):
    """Rename a folder with retries to avoid Dropbox/OneDrive sync lock issues."""
    attempt = 0
    while attempt < retries:
        attempt += 1
        try:
            src.rename(dst)
            return  # renamed successfully
        except PermissionError as e:
            # sync client probably holds a handle — wait and retry
            print(f" ⚠️ Rename attempt {attempt}/{retries} failed ({e}) — waiting {delay}s...")
            time.sleep(delay)
        except Exception as e:
            # anything else is not retryable; fall through to the failure line
            print(f" ❌ Unexpected rename error: {e}")
            break
    print(f" 🚫 Failed to rename '{src}' → '{dst}' after {retries} attempts.")
|
||||
|
||||
# ==============================
|
||||
# 🔑 TOKEN
|
||||
# ==============================
|
||||
def read_token(p: Path) -> str:
    """Load the bearer token from *p*; a 'Bearer ' prefix is tolerated."""
    token = p.read_text(encoding="utf-8").strip()
    if token.startswith("Bearer "):
        _, token = token.split(" ", 1)
    return token
|
||||
|
||||
# ==============================
|
||||
# 💾 DOWNLOAD FILE
|
||||
# ==============================
|
||||
def download_file(url: str, out_path: Path):
    """Fetch *url* and write the body to *out_path*, creating parent dirs.

    Best effort: any failure is printed and swallowed so one bad file does
    not abort the batch.
    """
    try:
        resp = requests.get(url, timeout=30)
        resp.raise_for_status()
        out_path.parent.mkdir(parents=True, exist_ok=True)
        out_path.write_bytes(resp.content)
        print(f" 💾 Saved: {out_path.relative_to(BASE_DIR)}")
    except Exception as e:
        print(f" ⚠️ Failed to download {out_path.name}: {e}")
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH ATTACHMENTS
|
||||
# ==============================
|
||||
def fetch_attachments(headers, request_id):
    """Return the attachment records for one patient request.

    Posts the ClinicRequestDetail_GetPatientRequest2 query and returns the
    `patientRequestMedicalRecords` list; returns [] on any HTTP or GraphQL
    failure so callers can simply iterate.
    """
    payload = {
        "operationName": "ClinicRequestDetail_GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {"requestId": request_id},
    }
    # timeout added: a stalled connection must not hang the whole batch run
    r = requests.post("https://api.medevio.cz/graphql", json=payload,
                      headers=headers, timeout=30)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code}")
        return []
    # GraphQL errors arrive as {"data": null, "errors": [...]}; guard the
    # None so .get() does not raise AttributeError
    body = r.json()
    return (body.get("data") or {}).get("patientRequestMedicalRecords", []) or []
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Download all attachments of 'Odeslat lékařskou zprávu' requests into
    per-patient folders under BASE_DIR, naming each folder with the request
    date, a short request-id hash, and the final file count."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    with conn.cursor() as cur:
        cur.execute("""
            SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
            FROM pozadavky
            WHERE displayTitle = 'Odeslat lékařskou zprávu'
        """)
        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} 'Odeslat lékařskou zprávu' requests")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        print(req_id)
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        jmeno = row.get("pacient_jmeno") or ""
        created = row.get("createdAt")
        created_date = None
        if created:
            try:
                created_date = datetime.strptime(str(created), "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%d")
            except Exception:
                created_date = "unknown"

        # NOTE(review): if createdAt is missing/falsy, created_date stays None
        # and the Path join below raises TypeError — confirm createdAt is
        # always populated, or default created_date to "unknown".
        patient_dir = BASE_DIR / f"{prijmeni}, {jmeno}" / created_date
        print(f"\n[{i}/{len(rows)}] 📂 {patient_dir.relative_to(BASE_DIR)}")

        attachments = fetch_attachments(headers, req_id)

        if not attachments:
            print(" ⚠️ No attachments")
            continue

        # short CRC32 hash of the request UUID keeps sibling folders unique
        uuid_short = short_crc8(str(req_id))

        # temporary folder name without the file count (added after download)
        temp_dir = BASE_DIR / f"{prijmeni}, {jmeno}" / f"{created_date} {uuid_short}"
        temp_dir.mkdir(parents=True, exist_ok=True)

        for a in attachments:
            m = a.get("medicalRecord") or {}
            url = m.get("downloadUrl")
            # filename taken from the signed URL, not from the description
            fname = extract_filename_from_url(url)

            if url:
                out_path = temp_dir / fname
                download_file(url, out_path)

        # count the files that actually landed on disk (downloads may fail)
        real_count = len([f for f in temp_dir.iterdir() if f.is_file()])

        # rename the folder to its final name including the count
        final_dir = temp_dir.parent / f"{temp_dir.name} ({real_count})"
        if real_count != 0:
            safe_rename(temp_dir, final_dir)
            print(f" 📎 Saved {real_count} attachments → {final_dir.relative_to(BASE_DIR)}")
        else:
            print(f" ⚠️ No attachments for {temp_dir.name}")
            temp_dir.rmdir()  # remove the empty folder

    conn.close()
    print("\n✅ Done!")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,209 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download all 'Odeslat lékařskou zprávu' attachments from Medevio API
|
||||
and store them (including binary content) directly into MySQL table `medevio_downloads`.
|
||||
|
||||
Each attachment (PDF, image, etc.) is fetched once and saved as LONGBLOB.
|
||||
Duplicate protection is ensured via UNIQUE KEY on `attachment_id`.
|
||||
"""
|
||||
|
||||
import zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def short_crc8(uuid_str: str) -> str:
    """Return deterministic 8-char hex string from any input string (CRC32)."""
    return "%08x" % (zlib.crc32(uuid_str.encode("utf-8")) & 0xFFFFFFFF)
|
||||
|
||||
def extract_filename_from_url(url: str) -> str:
    """Extracts filename from S3-style URL (between last '/' and first '?')."""
    try:
        tail = url.rpartition("/")[2]
        return tail.split("?", 1)[0]
    except Exception:
        return "unknown_filename"
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read Bearer token from file."""
    content = p.read_text(encoding="utf-8").strip()
    if content.startswith("Bearer "):
        content = content.split(" ", 1)[1]
    return content
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH ATTACHMENTS
|
||||
# ==============================
|
||||
def fetch_attachments(headers, request_id):
    """Fetch attachment records for one request; returns [] on any failure."""
    payload = {
        "operationName": "ClinicRequestDetail_GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {"requestId": request_id},
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload,
                      headers=headers, timeout=30)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code} for request {request_id}")
        return []
    # FIX: "data" can be null when GraphQL reports errors — the original
    # chained .get() would raise AttributeError on None
    return (r.json().get("data") or {}).get("patientRequestMedicalRecords", []) or []
|
||||
|
||||
# ==============================
|
||||
# 💾 SAVE TO MYSQL (with skip)
|
||||
# ==============================
|
||||
def insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids):
    """Download one attachment and upsert its metadata + bytes into
    `medevio_downloads`.

    Attachments whose id is already in *existing_ids* are skipped; on success
    the id is added to *existing_ids* so later duplicates in the same run are
    also skipped. `a` is the attachment record, `m` its `medicalRecord` dict.
    """
    attachment_id = a.get("id")
    if attachment_id in existing_ids:
        print(f" ⏭️ Skipping already downloaded attachment {attachment_id}")
        return

    url = m.get("downloadUrl")
    if not url:
        print(" ⚠️ No download URL")
        return

    filename = extract_filename_from_url(url)
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        content = r.content
    except Exception as e:
        # best effort — one failed download must not abort the batch
        print(f" ⚠️ Failed to download {url}: {e}")
        return

    file_size = len(content)
    attachment_type = a.get("attachmentType")
    content_type = m.get("contentType")

    # UNIQUE KEY on attachment_id makes this an upsert; re-runs refresh the blob.
    cur.execute("""
        INSERT INTO medevio_downloads (
            request_id, attachment_id, attachment_type, filename,
            content_type, file_size, pacient_jmeno, pacient_prijmeni,
            created_at, file_content
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            file_content = VALUES(file_content),
            file_size = VALUES(file_size),
            downloaded_at = NOW()
    """, (
        req_id,
        attachment_id,
        attachment_type,
        filename,
        content_type,
        file_size,
        jmeno,
        prijmeni,
        created_date,
        content
    ))
    # BUG FIX: the original printed the literal "(unknown)" instead of the
    # downloaded file's name.
    print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
    existing_ids.add(attachment_id)  # add to skip list
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Store every attachment of 'Odeslat lékařskou zprávu' requests
    (metadata + raw bytes) in the MySQL table `medevio_downloads`."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    # Preload already-stored ids so re-runs skip finished downloads.
    print("📦 Loading list of already downloaded attachments...")
    with conn.cursor() as cur:
        cur.execute("SELECT attachment_id FROM medevio_downloads")
        existing_ids = {row["attachment_id"] for row in cur.fetchall()}
    print(f"✅ Found {len(existing_ids)} attachments already saved.")

    with conn.cursor() as cur:
        cur.execute("""
            SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
            FROM pozadavky
            WHERE displayTitle = 'Odeslat lékařskou zprávu'
        """)
        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} 'Odeslat lékařskou zprávu' requests")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        jmeno = row.get("pacient_jmeno") or ""
        created = row.get("createdAt")

        # createdAt parsed leniently; None is stored when it doesn't parse
        try:
            created_date = datetime.strptime(str(created), "%Y-%m-%d %H:%M:%S")
        except Exception:
            created_date = None

        print(f"\n[{i}/{len(rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")

        attachments = fetch_attachments(headers, req_id)
        if not attachments:
            print(" ⚠️ No attachments")
            continue

        # one commit per request keeps a failed request from losing others
        with conn.cursor() as cur:
            for a in attachments:
                m = a.get("medicalRecord") or {}
                insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids)
            conn.commit()

        print(f" ✅ {len(attachments)} attachments saved for {prijmeni}, {jmeno}")
        time.sleep(0.5)  # be nice to the API

    conn.close()
    print("\n✅ Done! All attachments stored in MySQL table `medevio_downloads`.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,208 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Fetch communication threads (messages) from Medevio API
|
||||
for pozadavky where communicationprocessed IS NULL or outdated,
|
||||
optionally filtered by creation date.
|
||||
Stores results in MySQL table `medevio_messages`.
|
||||
"""
|
||||
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
GRAPHQL_URL = "https://api.medevio.cz/graphql"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
# ✅ Optional: Only process requests created after this date
|
||||
# Leave empty ("") to process all
|
||||
CREATED_AFTER = "2025-11-09" # 🕓 Adjust freely, or set to "" for no limit
|
||||
|
||||
# ==============================
|
||||
# 🔐 TOKEN
|
||||
# ==============================
|
||||
def read_token(p: Path) -> str:
    """Return the token stored in *p*, without any 'Bearer ' prefix."""
    text = p.read_text(encoding="utf-8").strip()
    if text.startswith("Bearer "):
        _, _, text = text.partition(" ")
    return text
|
||||
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {read_token(TOKEN_PATH)}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
# ==============================
|
||||
# 🧩 GRAPHQL QUERY
|
||||
# ==============================
|
||||
GRAPHQL_QUERY = """
|
||||
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
|
||||
messages: listMessages(
|
||||
patientRequestId: $requestId
|
||||
updatedSince: $updatedSince
|
||||
) {
|
||||
id
|
||||
createdAt
|
||||
text
|
||||
updatedAt
|
||||
readAt
|
||||
sender { id name surname clinicId }
|
||||
medicalRecord { downloadUrl description contentType }
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def normalize_ts(ts: str):
    """Convert ISO 8601 string to MySQL DATETIME format ('YYYY-MM-DD HH:MM:SS')."""
    if not ts:
        return None
    # drop the 'T' separator and UTC marker, then any fractional seconds
    cleaned = ts.replace("T", " ").replace("Z", "")
    return cleaned.partition(".")[0]
|
||||
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH MESSAGES
|
||||
# ==============================
|
||||
def fetch_messages(request_id):
    """Return all chat messages for one patient request; [] on any failure."""
    payload = {
        "operationName": "UseMessages_ListMessages",
        "variables": {"requestId": request_id, "updatedSince": None},
        "query": GRAPHQL_QUERY,
    }
    r = requests.post(GRAPHQL_URL, headers=headers, json=payload, timeout=30)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code}: {r.text}")
        return []
    # FIX: "data" is null when GraphQL reports errors — guard before .get(),
    # otherwise this crashed with AttributeError
    return (r.json().get("data") or {}).get("messages", []) or []
|
||||
|
||||
|
||||
# ==============================
|
||||
# 💾 CREATE TABLE IF NEEDED
|
||||
# ==============================
|
||||
def ensure_table_exists(conn):
    """Create the `medevio_messages` table if it does not exist yet.

    Idempotent (CREATE TABLE IF NOT EXISTS); commits the DDL on *conn*.
    Message id is the primary key, so re-fetched messages overwrite cleanly.
    """
    with conn.cursor() as cur:
        cur.execute("""
            CREATE TABLE IF NOT EXISTS medevio_messages (
                id VARCHAR(64) PRIMARY KEY,
                request_id VARCHAR(64),
                sender_name VARCHAR(255),
                sender_id VARCHAR(64),
                sender_clinic_id VARCHAR(64),
                text TEXT,
                created_at DATETIME NULL,
                read_at DATETIME NULL,
                updated_at DATETIME NULL,
                attachment_url TEXT,
                attachment_description TEXT,
                attachment_content_type VARCHAR(128),
                inserted_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            ) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
        """)
    conn.commit()
|
||||
|
||||
|
||||
# ==============================
|
||||
# 💾 INSERT MESSAGE
|
||||
# ==============================
|
||||
def insert_message(cur, req_id, msg):
    """Upsert one chat message into `medevio_messages`.

    Uses REPLACE INTO: an existing row with the same id is deleted and
    re-inserted, which also resets its inserted_at timestamp.
    """
    sender = msg.get("sender") or {}
    medrec = msg.get("medicalRecord") or {}

    cur.execute("""
        REPLACE INTO medevio_messages (
            id, request_id, sender_name, sender_id, sender_clinic_id, text,
            created_at, read_at, updated_at,
            attachment_url, attachment_description, attachment_content_type
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
    """, (
        msg.get("id"),
        req_id,
        # "name surname", trimmed when either part is missing
        f"{sender.get('name','')} {sender.get('surname','')}".strip(),
        sender.get("id"),
        sender.get("clinicId"),
        msg.get("text"),
        # ISO timestamps converted to MySQL DATETIME strings (or None)
        normalize_ts(msg.get("createdAt")),
        normalize_ts(msg.get("readAt")),
        normalize_ts(msg.get("updatedAt")),
        medrec.get("downloadUrl"),
        medrec.get("description"),
        medrec.get("contentType")
    ))
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Sync chat messages for requests whose `communicationprocessed` stamp is
    missing or older than the request's updatedAt, then advance the stamp."""
    conn = pymysql.connect(**DB_CONFIG)
    ensure_table_exists(conn)

    with conn.cursor() as cur:
        sql = """
            SELECT id, createdAt, updatedAt, communicationprocessed
            FROM pozadavky
            WHERE (communicationprocessed IS NULL OR communicationprocessed < updatedAt)
        """
        # optional date filter from module config
        if CREATED_AFTER:
            sql += " AND createdAt >= %s"
            cur.execute(sql, (CREATED_AFTER,))
        else:
            cur.execute(sql)

        rows = cur.fetchall()

    if not rows:
        print("✅ No pending communication updates.")
        return

    print(f"📋 Found {len(rows)} requests needing communication check.")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        print(f"\n[{i}/{len(rows)}] 🔍 Fetching communication for {req_id} ...")

        messages = fetch_messages(req_id)
        print(f" 💬 {len(messages)} messages found.")

        # Update timestamp even if none found, so the row is not re-checked
        with conn.cursor() as cur:
            if messages:
                for msg in messages:
                    insert_message(cur, req_id, msg)
            cur.execute("""
                UPDATE pozadavky
                SET communicationprocessed = NOW()
                WHERE id = %s
            """, (req_id,))
        conn.commit()

        print(f" ✅ Processed {len(messages)} messages for {req_id}")
        time.sleep(0.5)  # avoid hammering the API

    conn.close()
    print("\n✅ All communication threads processed and timestamps updated.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,194 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download and store Medevio questionnaires (userNote + eCRF) for all patient requests.
|
||||
Uses the verified working query "GetPatientRequest2".
|
||||
"""
|
||||
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import time
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
GRAPHQL_URL = "https://api.medevio.cz/graphql"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
def fix_datetime(dt_str):
    """Parse an ISO-8601 timestamp into a datetime; None if empty/unparsable."""
    if not dt_str:
        return None
    try:
        # fromisoformat on older Pythons rejects the trailing 'Z' / '+00:00'
        cleaned = dt_str.replace("Z", "").replace("+00:00", "")
        return datetime.fromisoformat(cleaned)
    except Exception:
        return None
|
||||
|
||||
# ✅ Optional: limit which requests to process
|
||||
CREATED_AFTER = "2025-11-09" # set "" to disable
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def read_token(p: Path) -> str:
    """Read Bearer token from file."""
    token = p.read_text(encoding="utf-8").strip()
    if token.startswith("Bearer "):
        token = token[len("Bearer "):]
    return token
|
||||
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
|
||||
request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
userNote
|
||||
eventType
|
||||
extendedPatient(clinicSlug: $clinicSlug) {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
ecrfFilledData(locale: $locale) {
|
||||
name
|
||||
groups {
|
||||
label
|
||||
fields {
|
||||
name
|
||||
label
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def fetch_questionnaire(headers, request_id, clinic_slug):
    """Fetch questionnaire (userNote + eCRF) for one request; None on failure."""
    payload = {
        "operationName": "GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {
            "requestId": request_id,
            "clinicSlug": clinic_slug,
            "locale": "cs",
        },
    }
    r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=40)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code} for {request_id}: {r.text}")
        return None
    # FIX: "data" is null when the GraphQL layer reports errors — guard the
    # None before .get(), which previously raised AttributeError
    return (r.json().get("data") or {}).get("request")
|
||||
|
||||
|
||||
def insert_questionnaire(cur, req):
    """Insert (or refresh) one questionnaire row in `medevio_questionnaires`.

    *req* is the `request` object from GetPatientRequest2; the eCRF payload
    is stored verbatim as JSON. No-op when *req* is falsy.
    """
    if not req:
        return

    patient = req.get("extendedPatient") or {}
    ecrf_data = req.get("ecrfFilledData")

    # ISO timestamps converted to datetime (None when unparsable)
    created_at = fix_datetime(req.get("createdAt"))
    updated_at = fix_datetime(req.get("updatedAt"))

    # upsert keyed on request_id; re-runs refresh note/eCRF and updated_local
    cur.execute("""
        INSERT INTO medevio_questionnaires (
            request_id, patient_name, patient_surname, patient_identification,
            created_at, updated_at, user_note, ecrf_json
        )
        VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            updated_at = VALUES(updated_at),
            user_note = VALUES(user_note),
            ecrf_json = VALUES(ecrf_json),
            updated_local = NOW()
    """, (
        req.get("id"),
        patient.get("name"),
        patient.get("surname"),
        patient.get("identificationNumber"),
        created_at,
        updated_at,
        req.get("userNote"),
        json.dumps(ecrf_data, ensure_ascii=False),
    ))
    print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Fetch questionnaires for requests whose `questionnaireprocessed` stamp
    is missing or older than updatedAt, store them, and advance the stamp."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    with conn.cursor() as cur:
        sql = """
            SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
            FROM pozadavky
            WHERE (questionnaireprocessed IS NULL OR questionnaireprocessed < updatedAt)
        """
        # optional date filter from module config
        if CREATED_AFTER:
            sql += " AND createdAt >= %s"
            cur.execute(sql, (CREATED_AFTER,))
        else:
            cur.execute(sql)

        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} requests needing questionnaire check.")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")

        req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
        if not req:
            print(" ⚠️ No questionnaire data found.")
            # NOTE(review): the stamp is NOT advanced on failure, so this row
            # is retried on every run — confirm that is intended.
            continue

        with conn.cursor() as cur:
            insert_questionnaire(cur, req)
            cur.execute("UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s", (req_id,))
        conn.commit()

        time.sleep(0.4)  # polite pacing

    conn.close()
    print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pandas as pd
|
||||
import pymysql
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# ================================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ================================
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
# kam uložit výstup
|
||||
OUTPUT_DIR = r"U:\Dropbox\!!!Days\Downloads Z230"
|
||||
DAYS_BACK = 700 # posledních X dní
|
||||
|
||||
# ================================
|
||||
# 📘 SQL dotaz
|
||||
# ================================
|
||||
SQL = f"""
|
||||
SELECT
|
||||
m.id AS Message_ID,
|
||||
m.request_id AS Request_ID,
|
||||
m.created_at AS Datum_vytvoření,
|
||||
m.sender_name AS Odesílatel,
|
||||
m.text AS Text_zprávy,
|
||||
m.pacient_jmeno AS Pacient_jméno,
|
||||
m.pacient_prijmeni AS Pacient_příjmení,
|
||||
m.pacient_rodnecislo AS Rodné_číslo
|
||||
FROM medevio_messages m
|
||||
WHERE m.created_at >= NOW() - INTERVAL {DAYS_BACK} DAY
|
||||
ORDER BY m.created_at DESC;
|
||||
"""
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def main():
    """Export the last DAYS_BACK days of `medevio_messages` to an .xlsx report
    named by today's date under OUTPUT_DIR."""
    conn = pymysql.connect(**DB_CONFIG)
    # NOTE(review): pandas officially supports SQLAlchemy connectables; a raw
    # pymysql connection works but emits a UserWarning on recent pandas.
    df = pd.read_sql(SQL, conn)
    conn.close()

    today = datetime.now().strftime("%Y-%m-%d")
    output_path = f"{OUTPUT_DIR}\\Medevio_messages_report_{today}.xlsx"

    # requires an Excel writer backend (openpyxl/xlsxwriter) to be installed
    df.to_excel(output_path, index=False)

    print(f"✅ Export hotov: {output_path}")
    print(f"📄 Počet řádků: {len(df)}")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import time, socket
|
||||
|
||||
# ===============================
|
||||
# ⚙️ CONFIG
|
||||
# ===============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetMessages(
|
||||
$clinicSlug: String!,
|
||||
$requestId: ID!
|
||||
) {
|
||||
clinicRequestDetail_GetPatientRequestMessages(
|
||||
clinicSlug: $clinicSlug,
|
||||
requestId: $requestId
|
||||
) {
|
||||
id
|
||||
text
|
||||
createdAt
|
||||
sender {
|
||||
id
|
||||
name
|
||||
}
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ===============================
|
||||
# 🔑 Token reader
|
||||
# ===============================
|
||||
def read_token(path: Path) -> str:
    """Return the token from *path*, dropping a leading 'Bearer ' if present."""
    raw = path.read_text(encoding="utf-8").strip()
    if not raw.startswith("Bearer "):
        return raw
    return raw.split(" ", 1)[1]
|
||||
|
||||
# ===============================
|
||||
# 🕒 Helper
|
||||
# ===============================
|
||||
def to_mysql_dt(iso_str):
    """Convert an ISO-8601 timestamp to 'YYYY-MM-DD HH:MM:SS'; None on failure."""
    if not iso_str:
        return None
    try:
        # map the 'Z' suffix to an explicit UTC offset fromisoformat accepts
        parsed = datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
        return parsed.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return None
|
||||
|
||||
# ===============================
|
||||
# 💾 Upsert
|
||||
# ===============================
|
||||
def upsert_message(conn, msg, request_id):
    """Insert or refresh one chat message row in `medevio_messages`.

    conn       -- open pymysql connection; committed here after the execute.
    msg        -- one message dict from the GraphQL response; its `sender` and
                  `extendedPatient` sub-objects may be missing/None.
    request_id -- id of the parent request the message belongs to.
    """
    s = msg.get("sender") or {}
    p = msg.get("extendedPatient") or {}

    # `id` is the unique key, so re-running the sync updates instead of duplicating.
    sql = """
        INSERT INTO medevio_messages (
            id, request_id, sender_name, sender_id, text, created_at,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            text=VALUES(text),
            created_at=VALUES(created_at),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """

    vals = (
        msg.get("id"),
        request_id,
        s.get("name"),
        s.get("id"),
        msg.get("text"),
        to_mysql_dt(msg.get("createdAt")),  # ISO timestamp -> MySQL DATETIME string
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )

    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()
|
||||
|
||||
# ===============================
|
||||
# 📡 Fetch messages for one request
|
||||
# ===============================
|
||||
def fetch_messages(headers, request_id):
    """POST the GetMessages GraphQL operation for one request and return the
    list of messages (empty list when the response carries none)."""
    body = {
        "operationName": "ClinicRequestDetail_GetMessages",
        "query": GRAPHQL_QUERY,
        "variables": {"clinicSlug": CLINIC_SLUG, "requestId": request_id},
    }
    resp = requests.post("https://api.medevio.cz/graphql", json=body, headers=headers)
    resp.raise_for_status()
    return resp.json().get("data", {}).get("clinicRequestDetail_GetPatientRequestMessages", [])
|
||||
|
||||
# ===============================
|
||||
# 🧠 Main
|
||||
# ===============================
|
||||
def main():
    """Fetch the message thread of every request in `pozadavky` and upsert
    each message into `medevio_messages`.

    Errors on a single request are logged and skipped so one bad record does
    not abort the whole sync.
    """
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    cur = conn.cursor()  # NOTE(review): cursor is never closed explicitly; conn.close() covers it

    # take all request ids from the pozadavky table, newest first
    cur.execute("SELECT id FROM pozadavky ORDER BY updatedAt DESC")
    request_ids = [r["id"] for r in cur.fetchall()]
    print(f"📋 Found {len(request_ids)} požadavků.")

    for i, rid in enumerate(request_ids, 1):
        try:
            msgs = fetch_messages(headers, rid)
            for msg in msgs:
                upsert_message(conn, msg, rid)
            print(f"[{i}/{len(request_ids)}] {rid} → {len(msgs)} zpráv uloženo.")
            time.sleep(0.4)  # polite pause between API calls
        except Exception as e:
            # keep going; the failing request will be retried on the next run
            print(f"❌ Chyba při načítání {rid}: {e}")

    conn.close()
    print("\n✅ Hotovo, všechny zprávy synchronizovány.")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,179 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
# ===============================
# ⚙️ CONFIG
# ===============================
# token.txt lives one directory above this script (shared by the other sync scripts).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"  # Medevio clinic slug
BATCH_SIZE = 100  # page size for listPatientRequestsForClinic2

# NOTE(review): database credentials are hard-coded and committed; consider
# moving them to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}

# GraphQL operation: one page of requests (with patient identity) for a state.
GRAPHQL_QUERY = r"""
query ClinicRequestGrid_ListPatientRequestsForClinic2(
    $clinicSlug: String!,
    $queueId: String,
    $queueAssignment: QueueAssignmentFilter!,
    $pageInfo: PageInfo!,
    $locale: Locale!,
    $state: PatientRequestState
) {
  requestsResponse: listPatientRequestsForClinic2(
    clinicSlug: $clinicSlug,
    queueId: $queueId,
    queueAssignment: $queueAssignment,
    pageInfo: $pageInfo,
    state: $state
  ) {
    count
    patientRequests {
      id
      displayTitle(locale: $locale)
      createdAt
      updatedAt
      doneAt
      removedAt
      extendedPatient {
        name
        surname
        identificationNumber
      }
    }
  }
}
"""
|
||||
|
||||
# ================================
|
||||
# 🔑 TOKEN
|
||||
# ================================
|
||||
def read_token(p: Path) -> str:
    """Read the token file and drop a leading ``Bearer `` prefix if present."""
    contents = p.read_text(encoding="utf-8").strip()
    marker = "Bearer "
    return contents[len(marker):] if contents.startswith(marker) else contents
|
||||
|
||||
# ================================
|
||||
# 📡 FETCH FUNCTION
|
||||
# ================================
|
||||
def fetch_requests(headers, state, offset=0):
    """Fetch a batch of patient requests for a given state.

    Retries up to 3 times on transport errors, waiting 5 s between attempts.
    Fix: the original also slept 5 s *after* the final failed attempt, wasting
    time for nothing; the sleep now only happens between attempts.
    Returns ([], 0) when every attempt fails.
    """
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": state,
    }
    payload = {
        "operationName": "ClinicRequestGrid_ListPatientRequestsForClinic2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    attempts = 3
    for attempt in range(attempts):
        try:
            r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
            r.raise_for_status()
            resp = r.json().get("data", {}).get("requestsResponse", {})
            return resp.get("patientRequests", []), resp.get("count", 0)
        except requests.exceptions.RequestException as e:
            print(f"⚠️ Chyba při načítání (pokus {attempt+1}/3): {e}")
            if attempt < attempts - 1:  # no pointless sleep after the last attempt
                time.sleep(5)
    return [], 0
|
||||
|
||||
# ================================
|
||||
# 💾 UPDATE ALL MESSAGES BY PATIENT DATA
|
||||
# ================================
|
||||
def update_all_messages(conn, patient):
    """Update all messages belonging to this request with patient data.

    `patient` is one patientRequests entry from the GraphQL response; its
    `extendedPatient` block (if any) supplies name/surname/birth number.
    Returns the number of message rows rewritten (0 when there is no patient).
    """
    p = patient.get("extendedPatient") or {}
    if not p:
        return 0  # nothing to propagate

    sql = """
        UPDATE medevio_messages
        SET pacient_jmeno=%s,
            pacient_prijmeni=%s,
            pacient_rodnecislo=%s
        WHERE request_id=%s
    """
    vals = (p.get("name"), p.get("surname"), p.get("identificationNumber"), patient.get("id"))
    with conn.cursor() as cur:
        cur.execute(sql, vals)
        affected = cur.rowcount  # rows actually touched by the UPDATE
    conn.commit()
    return affected
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def process_state(conn, headers, state):
    """Walk all requests in *state* page by page and propagate patient data
    onto each request's messages.

    Returns (requests_processed, messages_updated) totals for the summary.
    """
    print(f"\n=== 🟦 Zpracovávám {state} požadavky ===")
    offset = 0
    total_processed = 0
    total_updated = 0

    while True:
        batch, total_count = fetch_requests(headers, state, offset)
        if not batch:
            break  # empty page (or repeated fetch failure) ends the walk

        print(f"📦 Dávka od offsetu {offset} ({len(batch)} záznamů z {total_count})")
        for r in batch:
            updated = update_all_messages(conn, r)
            total_processed += 1
            total_updated += updated
            if updated:
                print(f" ↳ {r.get('id')} → {updated} zpráv aktualizováno")

        offset += BATCH_SIZE
        if offset >= total_count:
            break  # walked past the last page

        time.sleep(0.4)  # polite pause between pages

    print(f"✅ {state}: zpracováno {total_processed} požadavků, aktualizováno {total_updated} zpráv.")
    return total_processed, total_updated
|
||||
|
||||
# ================================
|
||||
# 🚀 ENTRY POINT
|
||||
# ================================
|
||||
def main():
    """Run the patient-data propagation for both ACTIVE and DONE requests
    and print a combined summary."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    print(f"\n=== Medevio mass patient sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    total_p, total_u = process_state(conn, headers, "ACTIVE")
    done_p, done_u = process_state(conn, headers, "DONE")

    conn.close()

    print("\n=== 🧾 SOUHRN ===")
    print(f"ACTIVE: {total_p} požadavků, {total_u} zpráv aktualizováno")
    print(f"DONE: {done_p} požadavků, {done_u} zpráv aktualizováno")
    print("===========================================")
    print(f"CELKEM: {total_p + done_p} požadavků, {total_u + done_u} zpráv aktualizováno ✅")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,228 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download all attachments for pozadavky where attachmentsProcessed IS NULL
|
||||
and (optionally) createdAt is newer than a configurable cutoff date.
|
||||
Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed = NOW().
|
||||
"""
|
||||
|
||||
import zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
# ==============================
# 🔧 CONFIGURATION
# ==============================
# token.txt lives one directory above this script (shared by the other sync scripts).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"  # Medevio clinic slug

# NOTE(review): database credentials are hard-coded and committed; consider
# moving them to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}

# ✅ Optional: Only process requests created after this date
# Leave empty ("") to process all
CREATED_AFTER = "2025-01-01"  # 🕓 Adjust freely, or set to "" for no limit

# GraphQL operation: all downloadable attachments of one request.
GRAPHQL_QUERY = r"""
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
  patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
    attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
    patientRequestId: $requestId
    pageInfo: {first: 100, offset: 0}
  ) {
    attachmentType
    id
    medicalRecord {
      contentType
      description
      downloadUrl
      id
      url
      visibleToPatient
    }
  }
}
"""
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def short_crc8(uuid_str: str) -> str:
    """Deterministic 8-hex-digit CRC32 digest of *uuid_str*."""
    digest = zlib.crc32(uuid_str.encode("utf-8")) & 0xFFFFFFFF
    return format(digest, "08x")
|
||||
|
||||
def extract_filename_from_url(url: str) -> str:
    """Extract the filename from an S3-style URL: the segment after the last
    '/' and before the first '?'.  Falls back to "unknown_filename" when the
    input cannot be processed (e.g. None)."""
    try:
        tail = url.rsplit("/", 1)[-1]
        return tail.split("?", 1)[0]
    except Exception:
        return "unknown_filename"
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the Bearer token from a file, returning the raw token only."""
    value = p.read_text(encoding="utf-8").strip()
    scheme = "Bearer "
    if value.startswith(scheme):
        value = value[len(scheme):]
    return value
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH ATTACHMENTS
|
||||
# ==============================
|
||||
def fetch_attachments(headers, request_id):
    """Fetch the medical-record attachment list for one request.

    Returns [] on a non-200 HTTP status (logged) or an empty response.
    """
    body = {
        "operationName": "ClinicRequestDetail_GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {"requestId": request_id},
    }
    resp = requests.post("https://api.medevio.cz/graphql", json=body, headers=headers, timeout=30)
    if resp.status_code != 200:
        print(f"❌ HTTP {resp.status_code} for request {request_id}")
        return []
    return resp.json().get("data", {}).get("patientRequestMedicalRecords", [])
|
||||
|
||||
# ==============================
|
||||
# 💾 SAVE TO MYSQL (with skip)
|
||||
# ==============================
|
||||
def insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids):
    """Download one attachment and upsert its bytes into `medevio_downloads`.

    cur          -- open cursor; the caller commits.
    req_id       -- parent request id.
    a            -- attachment entry from the GraphQL response.
    m            -- its `medicalRecord` sub-object (may be empty).
    jmeno, prijmeni -- patient first/last name stored alongside the file.
    created_date -- request creation datetime (or None).
    existing_ids -- set of attachment_ids already saved; updated in place so
                    one run never downloads the same file twice.

    Returns True when a row was written, False when skipped or download failed.
    """
    attachment_id = a.get("id")
    if attachment_id in existing_ids:
        print(f" ⏭️ Skipping already downloaded attachment {attachment_id}")
        return False

    url = m.get("downloadUrl")
    if not url:
        print(" ⚠️ No download URL")
        return False

    filename = extract_filename_from_url(url)
    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        content = r.content
    except Exception as e:
        print(f" ⚠️ Failed to download {url}: {e}")
        return False

    file_size = len(content)
    attachment_type = a.get("attachmentType")
    content_type = m.get("contentType")

    cur.execute("""
        INSERT INTO medevio_downloads (
            request_id, attachment_id, attachment_type, filename,
            content_type, file_size, pacient_jmeno, pacient_prijmeni,
            created_at, file_content
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            file_content = VALUES(file_content),
            file_size = VALUES(file_size),
            downloaded_at = NOW()
    """, (
        req_id,
        attachment_id,
        attachment_type,
        filename,
        content_type,
        file_size,
        jmeno,
        prijmeni,
        created_date,
        content,
    ))
    existing_ids.add(attachment_id)
    # fix: report the real filename instead of the "(unknown)" placeholder
    print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
    return True
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Download attachments for every unprocessed request.

    Flow: load the set of already-saved attachment ids → select `pozadavky`
    rows with attachmentsProcessed IS NULL (optionally only those created
    after CREATED_AFTER) → for each, fetch its attachment list, download and
    insert the new files, then stamp pozadavky.attachmentsProcessed = NOW()
    so the row is never revisited.
    """
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    print("📦 Loading list of already downloaded attachments...")
    with conn.cursor() as cur:
        cur.execute("SELECT attachment_id FROM medevio_downloads")
        existing_ids = {row["attachment_id"] for row in cur.fetchall()}
    print(f"✅ Found {len(existing_ids)} attachments already saved.")

    # ✅ Dynamic SQL with optional createdAt filter (parameterized, not inlined)
    sql = """
        SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
        FROM pozadavky
        WHERE attachmentsProcessed IS NULL
    """
    params = []
    if CREATED_AFTER:
        sql += " AND createdAt >= %s"
        params.append(CREATED_AFTER)

    with conn.cursor() as cur:
        cur.execute(sql, params)
        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} pozadavky to process (attachmentsProcessed IS NULL"
          + (f", created >= {CREATED_AFTER}" if CREATED_AFTER else "") + ")")

    for i, row in enumerate(rows, 1):
        time.sleep(1)  # polite API delay
        req_id = row["id"]
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        jmeno = row.get("pacient_jmeno") or ""
        created = row.get("createdAt")

        # createdAt may come back as a datetime or a string; tolerate both
        try:
            created_date = datetime.strptime(str(created), "%Y-%m-%d %H:%M:%S")
        except Exception:
            created_date = None

        print(f"\n[{i}/{len(rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")

        attachments = fetch_attachments(headers, req_id)
        if not attachments:
            print(" ⚠️ No attachments found")
            # still mark processed so an empty request is not re-fetched forever
            with conn.cursor() as cur:
                cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
            conn.commit()
            continue

        with conn.cursor() as cur:
            for a in attachments:
                m = a.get("medicalRecord") or {}
                insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date, existing_ids)
        conn.commit()

        # ✅ mark processed
        with conn.cursor() as cur:
            cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
        conn.commit()

        print(f" ✅ {len(attachments)} attachments processed for {prijmeni}, {jmeno}")
        time.sleep(0.3)  # polite API delay

    conn.close()
    print("\n✅ Done! All new attachments processed and pozadavky updated.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,240 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
import time
|
||||
from dateutil import parser
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
import sys
try:
    # Python 3.7+: re-encode the existing streams in place
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
    # Python < 3.7 fallback (not needed for you, but safe)
    import io
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
# ================================
# 🔧 CONFIGURATION
# ================================
# token.txt lives one directory above this script (shared by the other sync scripts).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"  # Medevio clinic slug
BATCH_SIZE = 100  # page size for listPatientRequestsForClinic2

# NOTE(review): database credentials are hard-coded and committed; consider
# moving them to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}

# ⭐ New tested query – includes lastMessage.createdAt so a fresh chat
# message bumps the request's effective updatedAt.
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
    $clinicSlug: String!,
    $queueId: String,
    $queueAssignment: QueueAssignmentFilter!,
    $state: PatientRequestState,
    $pageInfo: PageInfo!,
    $locale: Locale!
) {
  requestsResponse: listPatientRequestsForClinic2(
    clinicSlug: $clinicSlug,
    queueId: $queueId,
    queueAssignment: $queueAssignment,
    state: $state,
    pageInfo: $pageInfo
  ) {
    count
    patientRequests {
      id
      displayTitle(locale: $locale)
      createdAt
      updatedAt
      doneAt
      removedAt
      extendedPatient {
        name
        surname
        identificationNumber
      }
      lastMessage {
        createdAt
      }
    }
  }
}
"""
|
||||
|
||||
|
||||
# ================================
|
||||
# 🧿 SAFE DATETIME PARSER (ALWAYS UTC → LOCAL)
|
||||
# ================================
|
||||
def to_mysql_dt_utc(iso_str):
    """
    Parse Medevio timestamps safely.
    Treat timestamps WITHOUT timezone as UTC.
    Convert to local time before saving to MySQL.

    Returns a 'YYYY-MM-DD HH:MM:SS' string, or None for empty/unparsable input.
    """
    if not iso_str:
        return None
    try:
        dt = parser.isoparse(iso_str)

        # If tz is missing → assume UTC
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)

        # Convert to local timezone
        dt_local = dt.astimezone()

        return dt_local.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        # fix: was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
        return None
|
||||
|
||||
|
||||
# ================================
|
||||
# 🔑 TOKEN
|
||||
# ================================
|
||||
def read_token(path: Path) -> str:
    """Return the API token from *path*, minus any ``Bearer `` prefix."""
    raw = path.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
|
||||
# ================================
|
||||
# 💾 UPSERT (včetně správného updatedAt)
|
||||
# ================================
|
||||
def upsert(conn, r):
    """Insert or refresh one request row in `pozadavky`.

    The stored updatedAt is the newer of the API's updatedAt and the creation
    time of the last chat message, so a fresh message bumps the row even when
    the request record itself did not change.
    """
    p = r.get("extendedPatient") or {}

    # raw timestamps from the API, normalized to local MySQL DATETIME strings
    api_updated = to_mysql_dt_utc(r.get("updatedAt"))

    last_msg = r.get("lastMessage") or {}
    msg_updated = to_mysql_dt_utc(last_msg.get("createdAt"))

    # newest change wins; lexicographic max is valid for 'YYYY-MM-DD HH:MM:SS'
    def max_dt(a, b):
        if a and b:
            return max(a, b)
        return a or b

    final_updated = max_dt(api_updated, msg_updated)

    # `id` is the unique key: re-running the sync updates rather than duplicates
    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """

    vals = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt_utc(r.get("createdAt")),
        final_updated,
        to_mysql_dt_utc(r.get("doneAt")),
        to_mysql_dt_utc(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )

    with conn.cursor() as cur:
        cur.execute(sql, vals)
    conn.commit()
|
||||
|
||||
|
||||
# ================================
|
||||
# 📡 FETCH ACTIVE PAGE
|
||||
# ================================
|
||||
def fetch_active(headers, offset):
    """Fetch one page of ACTIVE requests starting at *offset*.

    Returns (patient_requests, total_count); raises on HTTP errors.
    """
    gql_vars = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": "ACTIVE",
    }
    body = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": gql_vars,
    }

    resp = requests.post("https://api.medevio.cz/graphql", json=body, headers=headers)
    resp.raise_for_status()

    block = resp.json().get("data", {}).get("requestsResponse", {})
    return block.get("patientRequests", []), block.get("count", 0)
|
||||
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def main():
    """Page through all ACTIVE requests in Medevio and upsert each into
    `pozadavky`."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    print(f"\n=== Sync ACTIVE požadavků @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    offset = 0
    total_processed = 0
    total_count = None  # learned from the first page's response

    while True:
        batch, count = fetch_active(headers, offset)

        if total_count is None:
            total_count = count
            print(f"📡 Celkem ACTIVE v Medevio: {count}")

        if not batch:
            break  # empty page ends the walk

        for r in batch:
            upsert(conn, r)

        total_processed += len(batch)
        print(f" • {total_processed}/{total_count} ACTIVE processed")

        if offset + BATCH_SIZE >= count:
            break  # last page reached

        offset += BATCH_SIZE
        time.sleep(0.4)  # polite pause between pages

    conn.close()
    print("\n✅ ACTIVE sync hotovo!\n")


# ================================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,210 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from dateutil import parser
|
||||
|
||||
# ================================
# 🔧 CONFIGURATION
# ================================
# token.txt lives one directory above this script (shared by the other sync scripts).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"  # Medevio clinic slug

LIMIT = 500  # batch size / number of records
FULL_DOWNLOAD = False  # 🔥 TOGGLE: False = last X, True = ALL batches

# NOTE(review): database credentials are hard-coded and committed; consider
# moving them to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}

# ⭐ Query with lastMessage (its createdAt feeds the effective updatedAt)
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
    $clinicSlug: String!,
    $queueId: String,
    $queueAssignment: QueueAssignmentFilter!,
    $state: PatientRequestState,
    $pageInfo: PageInfo!,
    $locale: Locale!
) {
  requestsResponse: listPatientRequestsForClinic2(
    clinicSlug: $clinicSlug,
    queueId: $queueId,
    queueAssignment: $queueAssignment,
    state: $state,
    pageInfo: $pageInfo
  ) {
    count
    patientRequests {
      id
      displayTitle(locale: $locale)
      createdAt
      updatedAt
      doneAt
      removedAt
      extendedPatient {
        name
        surname
        identificationNumber
      }
      lastMessage {
        createdAt
      }
    }
  }
}
"""
|
||||
|
||||
# ================================
|
||||
# TOKEN
|
||||
# ================================
|
||||
def read_token(path: Path) -> str:
    """Load the stored token, dropping a ``Bearer `` prefix when present."""
    text = path.read_text(encoding="utf-8").strip()
    bearer = "Bearer "
    if text.startswith(bearer):
        text = text[len(bearer):]
    return text
|
||||
|
||||
# ================================
|
||||
# DATETIME PARSER (UTC → MySQL)
|
||||
# ================================
|
||||
def to_mysql_dt(iso_str):
    """Convert an ISO-8601 timestamp to a local-time MySQL DATETIME string.

    Returns None for empty or unparsable input.
    """
    if not iso_str:
        return None
    try:
        dt = parser.isoparse(iso_str)  # ISO8601 → aware datetime (UTC)
        dt = dt.astimezone()           # convert to local timezone
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        # fix: was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
        return None
|
||||
|
||||
# ================================
|
||||
# UPSERT REQUEST
|
||||
# ================================
|
||||
def upsert(conn, r):
    """Insert or refresh one request row in `pozadavky`.

    updatedAt is the newer of the API's updatedAt and the last chat message's
    createdAt, so message activity bumps the row too.
    """
    p = r.get("extendedPatient") or {}

    api_updated = to_mysql_dt(r.get("updatedAt"))
    last_msg = r.get("lastMessage") or {}
    msg_at = to_mysql_dt(last_msg.get("createdAt"))

    # newest change wins; lexicographic max is valid for 'YYYY-MM-DD HH:MM:SS'
    def max_dt(a, b):
        if a and b:
            return max(a, b)
        return a or b

    final_updated = max_dt(api_updated, msg_at)

    # `id` is the unique key: re-running the sync updates rather than duplicates
    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """

    vals = (
        r["id"],  # id is mandatory; KeyError here means a malformed API row
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        final_updated,
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        p.get("name"),
        p.get("surname"),
        p.get("identificationNumber"),
    )

    with conn.cursor() as cur:
        cur.execute(sql, vals)

    conn.commit()
|
||||
|
||||
# ================================
|
||||
# FETCH DONE REQUESTS (one batch)
|
||||
# ================================
|
||||
def fetch_done(headers, offset):
    """Fetch one batch (LIMIT rows) of DONE requests starting at *offset*.

    Returns (patient_requests, total_count).  Raises on HTTP errors and on a
    malformed GraphQL response (missing "data"/"requestsResponse").
    """
    # fix: local was named `vars`, shadowing the builtin
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": LIMIT, "offset": offset},
        "locale": "cs",
        "state": "DONE",
    }

    payload = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
    r.raise_for_status()

    data = r.json()["data"]["requestsResponse"]
    return data.get("patientRequests", []), data.get("count", 0)
|
||||
|
||||
# ================================
|
||||
# MAIN
|
||||
# ================================
|
||||
def main():
    """Sync DONE (closed) requests into `pozadavky`.

    With FULL_DOWNLOAD=False only the first LIMIT records (the most recent
    batch) are processed; True pages through every DONE request.
    """
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    print(f"\n=== Sync CLOSED requests @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    offset = 0
    total_count = None  # learned from the first page's response
    total_processed = 0

    while True:
        batch, count = fetch_done(headers, offset)

        if total_count is None:
            total_count = count
            print(f"📡 Total DONE in Medevio: {count}")

        if not batch:
            break  # empty page ends the walk

        print(f" • Processing batch offset={offset} size={len(batch)}")

        for r in batch:
            upsert(conn, r)
        total_processed += len(batch)

        if not FULL_DOWNLOAD:
            # process only last LIMIT records
            break

        # FULL DOWNLOAD → fetch next batch
        offset += LIMIT
        if offset >= count:
            break

    conn.close()
    print(f"\n✅ DONE — {total_processed} requests synced.\n")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,227 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download and store Medevio questionnaires (userNote + eCRF) for all patient requests.
|
||||
Uses the verified working query "GetPatientRequest2".
|
||||
"""
|
||||
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
import sys  # NOTE: re-import is harmless; `sys` is already imported above
try:
    # Python 3.7+: re-encode the existing streams in place
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
    # Python < 3.7 fallback (not needed for you, but safe)
    import io
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str):
    """Print *text*, degrading gracefully on consoles that are not UTF-8.

    On a non-UTF stream, characters above the BMP (emoji etc.) are stripped
    first; if printing still fails, everything non-ASCII is dropped.
    """
    encoding = sys.stdout.encoding or ""
    if not encoding.lower().startswith("utf"):
        # strip emoji + anything above the Basic Multilingual Plane
        text = "".join(c for c in text if ord(c) < 65536)
    try:
        print(text)
    except UnicodeEncodeError:
        # final ASCII fallback
        text = "".join(c for c in text if ord(c) < 128)
        print(text)
|
||||
|
||||
|
||||
# ==============================
# 🔧 CONFIGURATION
# ==============================
# token.txt lives one directory above this script (shared by the other sync scripts).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"  # Medevio clinic slug
GRAPHQL_URL = "https://api.medevio.cz/graphql"

# NOTE(review): database credentials are hard-coded and committed; consider
# moving them to environment variables or a config file outside version control.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
    "cursorclass": pymysql.cursors.DictCursor,  # rows come back as dicts
}
|
||||
|
||||
|
||||
# ==============================
# 🕒 DATETIME FIXER
# ==============================
def fix_datetime(dt_str):
    """Convert ISO 8601 string with 'Z' or ms into MySQL DATETIME format.

    The UTC markers ('Z' / '+00:00') are stripped, so the result is a naive
    datetime.  Returns None for empty or unparsable input.
    """
    if not dt_str:
        return None
    cleaned = dt_str.replace("Z", "").replace("+00:00", "")
    try:
        return datetime.fromisoformat(cleaned)
    except Exception:
        return None
|
||||
|
||||
|
||||
# Optional filter
# Requests created before this date are skipped in main(); set to "" to disable.
CREATED_AFTER = "2025-01-01"
|
||||
|
||||
|
||||
# ==============================
# 🧮 HELPERS
# ==============================
def read_token(p: Path) -> str:
    """Return the token stored in *p*, without any ``Bearer `` prefix."""
    content = p.read_text(encoding="utf-8").strip()
    scheme = "Bearer "
    return content[len(scheme):] if content.startswith(scheme) else content
|
||||
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
|
||||
request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
userNote
|
||||
eventType
|
||||
extendedPatient(clinicSlug: $clinicSlug) {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
ecrfFilledData(locale: $locale) {
|
||||
name
|
||||
groups {
|
||||
label
|
||||
fields {
|
||||
name
|
||||
label
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def fetch_questionnaire(headers, request_id, clinic_slug):
    """Fetch questionnaire for given request ID.

    Returns the decoded ``request`` object from the GraphQL response, or
    None on a non-200 HTTP status.
    """
    body = {
        "operationName": "GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {
            "requestId": request_id,
            "clinicSlug": clinic_slug,
            "locale": "cs",
        },
    }
    resp = requests.post(GRAPHQL_URL, json=body, headers=headers, timeout=40)
    if resp.status_code != 200:
        safe_print(f"❌ HTTP {resp.status_code} for {request_id}: {resp.text}")
        return None
    return resp.json().get("data", {}).get("request")
||||
|
||||
|
||||
def insert_questionnaire(cur, req):
    """Insert questionnaire data into MySQL."""
    if not req:
        return

    patient = req.get("extendedPatient") or {}
    # NOTE(review): ecrfFilledData may be None; json.dumps(None) stores the
    # string "null", matching the original behaviour.
    ecrf_payload = json.dumps(req.get("ecrfFilledData"), ensure_ascii=False)

    cur.execute("""
        INSERT INTO medevio_questionnaires (
            request_id, created_at, updated_at, user_note, ecrf_json
        )
        VALUES (%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            updated_at = VALUES(updated_at),
            user_note = VALUES(user_note),
            ecrf_json = VALUES(ecrf_json),
            updated_local = NOW()
    """, (
        req.get("id"),
        fix_datetime(req.get("createdAt")),
        fix_datetime(req.get("updatedAt")),
        req.get("userNote"),
        ecrf_payload,
    ))

    safe_print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
||||
|
||||
|
||||
# ==============================
# 🧠 MAIN
# ==============================
def main():
    """Sync questionnaires for requests whose data changed since last run.

    Selects rows from `pozadavky` never processed (or updated since
    processing), fetches each questionnaire over GraphQL, stores it, and
    stamps `questionnaireprocessed`.
    """
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    try:
        with conn.cursor() as cur:
            sql = """
            SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
            FROM pozadavky
            WHERE (questionnaireprocessed IS NULL OR questionnaireprocessed < updatedAt)
            """
            if CREATED_AFTER:
                sql += " AND createdAt >= %s"
                cur.execute(sql, (CREATED_AFTER,))
            else:
                cur.execute(sql)
            rows = cur.fetchall()

        safe_print(f"📋 Found {len(rows)} requests needing questionnaire check.")

        for i, row in enumerate(rows, 1):
            req_id = row["id"]
            safe_print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")

            req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
            if not req:
                safe_print(" ⚠️ No questionnaire data found.")
                continue

            with conn.cursor() as cur:
                insert_questionnaire(cur, req)
                # Stamp in the same transaction as the insert.
                cur.execute(
                    "UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s",
                    (req_id,)
                )
            conn.commit()

            time.sleep(0.6)  # be polite to the API
    finally:
        # BUGFIX: the connection used to leak when any step above raised.
        conn.close()

    safe_print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,287 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Stáhne konverzaci pro požadavky, kde:
|
||||
messagesProcessed IS NULL OR messagesProcessed < updatedAt.
|
||||
|
||||
Vloží do medevio_conversation a přílohy do medevio_downloads.
|
||||
"""
|
||||
|
||||
import zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
|
||||
import sys
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
# Python < 3.7 fallback (not needed for you, but safe)
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str):
|
||||
enc = sys.stdout.encoding or ""
|
||||
if not enc or not enc.lower().startswith("utf"):
|
||||
# strip emoji + characters outside BMP for Task Scheduler (CP1250)
|
||||
text = ''.join(ch for ch in text if ord(ch) < 65536)
|
||||
try:
|
||||
print(text)
|
||||
except UnicodeEncodeError:
|
||||
# fallback pure ASCII
|
||||
text = ''.join(ch for ch in text if ord(ch) < 128)
|
||||
print(text)
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY_MESSAGES = r"""
|
||||
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
|
||||
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
|
||||
id
|
||||
createdAt
|
||||
updatedAt
|
||||
readAt
|
||||
text
|
||||
type
|
||||
sender {
|
||||
id
|
||||
name
|
||||
surname
|
||||
clinicId
|
||||
}
|
||||
medicalRecord {
|
||||
id
|
||||
description
|
||||
contentType
|
||||
url
|
||||
downloadUrl
|
||||
token
|
||||
createdAt
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ==============================
# ⏱ DATETIME PARSER
# ==============================
def parse_dt(s):
    """Parse an ISO 8601 timestamp; return a datetime or None.

    A trailing 'Z' is converted to '+00:00' for fromisoformat; for formats
    fromisoformat rejects, fall back to the first 19 characters (seconds
    precision).
    """
    if not s:
        return None
    try:
        return datetime.fromisoformat(s.replace("Z", "+00:00"))
    except (ValueError, TypeError):  # BUGFIX: was a bare except
        pass
    try:
        return datetime.strptime(s[:19], "%Y-%m-%dT%H:%M:%S")
    except (ValueError, TypeError):
        return None
|
||||
|
||||
# ==============================
# 🔐 TOKEN
# ==============================
def read_token(path: Path) -> str:
    """Read the API token, stripping an optional leading 'Bearer ' prefix.

    BUGFIX: str.replace removed 'Bearer ' anywhere inside the token, which
    could corrupt a token containing that substring; now only a leading
    prefix is stripped (consistent with the sibling sync scripts).
    """
    tok = path.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        tok = tok[len("Bearer "):]
    return tok
||||
|
||||
# ==============================
# 📡 FETCH MESSAGES
# ==============================
def fetch_messages(headers, request_id):
    """Return the message list for one patient request; [] on HTTP error."""
    body = {
        "operationName": "UseMessages_ListMessages",
        "query": GRAPHQL_QUERY_MESSAGES,
        "variables": {"requestId": request_id, "updatedSince": None},
    }
    resp = requests.post("https://api.medevio.cz/graphql", json=body, headers=headers, timeout=30)
    if resp.status_code != 200:
        safe_print(f"❌ HTTP {resp.status_code} for request {request_id}")
        return []
    return resp.json().get("data", {}).get("messages", []) or []
||||
|
||||
|
||||
# ==============================
|
||||
# 💾 SAVE MESSAGE
|
||||
# ==============================
|
||||
def insert_message(cur, req_id, msg):
|
||||
|
||||
sender = msg.get("sender") or {}
|
||||
sender_name = " ".join(
|
||||
x for x in [sender.get("name"), sender.get("surname")] if x
|
||||
) or None
|
||||
|
||||
sql = """
|
||||
INSERT INTO medevio_conversation (
|
||||
id, request_id,
|
||||
sender_name, sender_id, sender_clinic_id,
|
||||
text, created_at, read_at, updated_at,
|
||||
attachment_url, attachment_description, attachment_content_type
|
||||
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
sender_name = VALUES(sender_name),
|
||||
sender_id = VALUES(sender_id),
|
||||
sender_clinic_id = VALUES(sender_clinic_id),
|
||||
text = VALUES(text),
|
||||
created_at = VALUES(created_at),
|
||||
read_at = VALUES(read_at),
|
||||
updated_at = VALUES(updated_at),
|
||||
attachment_url = VALUES(attachment_url),
|
||||
attachment_description = VALUES(attachment_description),
|
||||
attachment_content_type = VALUES(attachment_content_type)
|
||||
"""
|
||||
|
||||
mr = msg.get("medicalRecord") or {}
|
||||
|
||||
cur.execute(sql, (
|
||||
msg.get("id"),
|
||||
req_id,
|
||||
sender_name,
|
||||
sender.get("id"),
|
||||
sender.get("clinicId"),
|
||||
msg.get("text"),
|
||||
parse_dt(msg.get("createdAt")),
|
||||
parse_dt(msg.get("readAt")),
|
||||
parse_dt(msg.get("updatedAt")),
|
||||
mr.get("downloadUrl") or mr.get("url"),
|
||||
mr.get("description"),
|
||||
mr.get("contentType")
|
||||
))
|
||||
|
||||
|
||||
# ==============================
|
||||
# 💾 DOWNLOAD MESSAGE ATTACHMENT
|
||||
# ==============================
|
||||
def insert_download(cur, req_id, msg, existing_ids):
|
||||
|
||||
mr = msg.get("medicalRecord") or {}
|
||||
attachment_id = mr.get("id")
|
||||
if not attachment_id:
|
||||
return
|
||||
|
||||
if attachment_id in existing_ids:
|
||||
return
|
||||
|
||||
url = mr.get("downloadUrl") or mr.get("url")
|
||||
if not url:
|
||||
return
|
||||
|
||||
try:
|
||||
r = requests.get(url, timeout=30)
|
||||
r.raise_for_status()
|
||||
data = r.content
|
||||
except Exception as e:
|
||||
safe_print(f"⚠️ Failed to download: {e}")
|
||||
return
|
||||
|
||||
filename = url.split("/")[-1].split("?")[0]
|
||||
|
||||
cur.execute("""
|
||||
INSERT INTO medevio_downloads (
|
||||
request_id, attachment_id, attachment_type,
|
||||
filename, content_type, file_size, created_at, file_content
|
||||
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
file_content = VALUES(file_content),
|
||||
file_size = VALUES(file_size),
|
||||
downloaded_at = NOW()
|
||||
""", (
|
||||
req_id,
|
||||
attachment_id,
|
||||
"MESSAGE_ATTACHMENT",
|
||||
filename,
|
||||
mr.get("contentType"),
|
||||
len(data),
|
||||
parse_dt(msg.get("createdAt")),
|
||||
data
|
||||
))
|
||||
|
||||
existing_ids.add(attachment_id)
|
||||
|
||||
|
||||
# ==============================
# 🧠 MAIN
# ==============================
def main():
    """Full message sync for every pozadavek flagged as needing it.

    Loads the set of already-downloaded attachment IDs, then for each
    request inserts its messages and attachments and stamps
    `messagesProcessed`.
    """
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    try:
        # ---- Load existing attachments so downloads are not repeated
        with conn.cursor() as cur:
            cur.execute("SELECT attachment_id FROM medevio_downloads")
            existing_ids = {row["attachment_id"] for row in cur.fetchall()}

        safe_print(f"📦 Already downloaded attachments: {len(existing_ids)}\n")

        # ---- Select pozadavky needing message sync
        sql = """
        SELECT id
        FROM pozadavky
        WHERE messagesProcessed IS NULL
           OR messagesProcessed < updatedAt
        """
        with conn.cursor() as cur:
            cur.execute(sql)
            requests_to_process = cur.fetchall()

        safe_print(f"📋 Found {len(requests_to_process)} pozadavků requiring message sync.\n")

        # ---- Process each record
        for idx, row in enumerate(requests_to_process, 1):
            req_id = row["id"]
            safe_print(f"[{idx}/{len(requests_to_process)}] Processing {req_id} …")

            messages = fetch_messages(headers, req_id)

            with conn.cursor() as cur:
                for msg in messages:
                    insert_message(cur, req_id, msg)
                    insert_download(cur, req_id, msg, existing_ids)
            conn.commit()

            # Mark as processed only after the messages committed above.
            with conn.cursor() as cur:
                cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
            conn.commit()

            safe_print(f" ✅ {len(messages)} messages saved\n")
            time.sleep(0.25)  # gentle rate limit
    finally:
        # BUGFIX: the connection used to leak when any step above raised.
        conn.close()

    safe_print("🎉 Done!")


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,293 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Delta sync Medevio communication.
|
||||
Stáhne pouze zprávy změněné po messagesProcessed pro každý požadavek.
|
||||
"""
|
||||
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# ==============================
|
||||
# UTF-8 SAFE OUTPUT
|
||||
# ==============================
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
def safe_print(text: str):
|
||||
enc = sys.stdout.encoding or ""
|
||||
if not enc.lower().startswith("utf"):
|
||||
text = ''.join(ch for ch in text if ord(ch) < 65536)
|
||||
try:
|
||||
print(text)
|
||||
except UnicodeEncodeError:
|
||||
text = ''.join(ch for ch in text if ord(ch) < 128)
|
||||
print(text)
|
||||
|
||||
|
||||
# ==============================
|
||||
# CONFIG
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY_MESSAGES = r"""
|
||||
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
|
||||
messages: listMessages(
|
||||
patientRequestId: $requestId,
|
||||
updatedSince: $updatedSince
|
||||
) {
|
||||
id
|
||||
createdAt
|
||||
updatedAt
|
||||
readAt
|
||||
text
|
||||
type
|
||||
sender {
|
||||
id
|
||||
name
|
||||
surname
|
||||
clinicId
|
||||
}
|
||||
medicalRecord {
|
||||
id
|
||||
description
|
||||
contentType
|
||||
url
|
||||
downloadUrl
|
||||
createdAt
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
# ==============================
# HELPERS
# ==============================
def parse_dt(s):
    """Parse an ISO 8601 timestamp ('Z' accepted); None when unparseable."""
    if not s:
        return None
    normalized = s.replace("Z", "+00:00")
    try:
        result = datetime.fromisoformat(normalized)
    except Exception:
        result = None
    return result
||||
|
||||
|
||||
def read_token(path: Path) -> str:
    """Read the API token, stripping an optional leading 'Bearer ' prefix.

    BUGFIX: str.replace removed 'Bearer ' anywhere inside the token; only a
    leading prefix is stripped now (consistent with the sibling scripts).
    """
    tok = path.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        tok = tok[len("Bearer "):]
    return tok
||||
|
||||
|
||||
# ==============================
|
||||
# FETCH MESSAGES (DELTA)
|
||||
# ==============================
|
||||
def fetch_messages(headers, request_id, updated_since):
|
||||
payload = {
|
||||
"operationName": "UseMessages_ListMessages",
|
||||
"query": GRAPHQL_QUERY_MESSAGES,
|
||||
"variables": {
|
||||
"requestId": request_id,
|
||||
"updatedSince": updated_since,
|
||||
},
|
||||
}
|
||||
|
||||
r = requests.post(
|
||||
"https://api.medevio.cz/graphql",
|
||||
json=payload,
|
||||
headers=headers,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
if r.status_code != 200:
|
||||
safe_print(f"❌ HTTP {r.status_code} for request {request_id}")
|
||||
return []
|
||||
|
||||
j = r.json()
|
||||
if "errors" in j:
|
||||
safe_print(f"❌ GraphQL error for {request_id}: {j['errors']}")
|
||||
return []
|
||||
|
||||
return j.get("data", {}).get("messages", []) or []
|
||||
|
||||
|
||||
# ==============================
|
||||
# INSERT MESSAGE
|
||||
# ==============================
|
||||
def insert_message(cur, req_id, msg):
|
||||
sender = msg.get("sender") or {}
|
||||
sender_name = " ".join(
|
||||
x for x in [sender.get("name"), sender.get("surname")] if x
|
||||
) or None
|
||||
|
||||
mr = msg.get("medicalRecord") or {}
|
||||
|
||||
sql = """
|
||||
INSERT INTO medevio_conversation (
|
||||
id, request_id,
|
||||
sender_name, sender_id, sender_clinic_id,
|
||||
text, created_at, read_at, updated_at,
|
||||
attachment_url, attachment_description, attachment_content_type
|
||||
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
sender_name = VALUES(sender_name),
|
||||
sender_id = VALUES(sender_id),
|
||||
sender_clinic_id = VALUES(sender_clinic_id),
|
||||
text = VALUES(text),
|
||||
created_at = VALUES(created_at),
|
||||
read_at = VALUES(read_at),
|
||||
updated_at = VALUES(updated_at),
|
||||
attachment_url = VALUES(attachment_url),
|
||||
attachment_description = VALUES(attachment_description),
|
||||
attachment_content_type = VALUES(attachment_content_type)
|
||||
"""
|
||||
|
||||
cur.execute(sql, (
|
||||
msg.get("id"),
|
||||
req_id,
|
||||
sender_name,
|
||||
sender.get("id"),
|
||||
sender.get("clinicId"),
|
||||
msg.get("text"),
|
||||
parse_dt(msg.get("createdAt")),
|
||||
parse_dt(msg.get("readAt")),
|
||||
parse_dt(msg.get("updatedAt")),
|
||||
mr.get("downloadUrl") or mr.get("url"),
|
||||
mr.get("description"),
|
||||
mr.get("contentType")
|
||||
))
|
||||
|
||||
|
||||
# ==============================
|
||||
# INSERT ATTACHMENT (DEDUP)
|
||||
# ==============================
|
||||
def insert_download(cur, req_id, msg, existing_ids):
|
||||
mr = msg.get("medicalRecord") or {}
|
||||
attachment_id = mr.get("id")
|
||||
if not attachment_id or attachment_id in existing_ids:
|
||||
return
|
||||
|
||||
url = mr.get("downloadUrl") or mr.get("url")
|
||||
if not url:
|
||||
return
|
||||
|
||||
try:
|
||||
r = requests.get(url, timeout=30)
|
||||
r.raise_for_status()
|
||||
data = r.content
|
||||
except Exception as e:
|
||||
safe_print(f"⚠️ Attachment download failed: {e}")
|
||||
return
|
||||
|
||||
filename = url.split("/")[-1].split("?")[0]
|
||||
|
||||
cur.execute("""
|
||||
INSERT INTO medevio_downloads (
|
||||
request_id, attachment_id, attachment_type,
|
||||
filename, content_type, file_size, created_at, file_content
|
||||
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
file_content = VALUES(file_content),
|
||||
file_size = VALUES(file_size),
|
||||
downloaded_at = NOW()
|
||||
""", (
|
||||
req_id,
|
||||
attachment_id,
|
||||
"MESSAGE_ATTACHMENT",
|
||||
filename,
|
||||
mr.get("contentType"),
|
||||
len(data),
|
||||
parse_dt(msg.get("createdAt")),
|
||||
data
|
||||
))
|
||||
|
||||
existing_ids.add(attachment_id)
|
||||
|
||||
|
||||
# ==============================
|
||||
# MAIN
|
||||
# ==============================
|
||||
def main():
|
||||
token = read_token(TOKEN_PATH)
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json",
|
||||
}
|
||||
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
|
||||
# existing attachments
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT attachment_id FROM medevio_downloads")
|
||||
existing_ids = {r["attachment_id"] for r in cur.fetchall()}
|
||||
|
||||
# select requests needing sync
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("""
|
||||
SELECT id, messagesProcessed
|
||||
FROM pozadavky
|
||||
WHERE messagesProcessed IS NULL
|
||||
OR messagesProcessed < updatedAt
|
||||
""")
|
||||
rows = cur.fetchall()
|
||||
|
||||
safe_print(f"📋 Found {len(rows)} requests for message delta-sync\n")
|
||||
|
||||
for i, row in enumerate(rows, 1):
|
||||
req_id = row["id"]
|
||||
updated_since = row["messagesProcessed"]
|
||||
if updated_since:
|
||||
updated_since = updated_since.replace(microsecond=0).isoformat() + "Z"
|
||||
|
||||
safe_print(f"[{i}/{len(rows)}] {req_id}")
|
||||
|
||||
messages = fetch_messages(headers, req_id, updated_since)
|
||||
if not messages:
|
||||
safe_print(" ⏭ No new messages")
|
||||
else:
|
||||
with conn.cursor() as cur:
|
||||
for msg in messages:
|
||||
insert_message(cur, req_id, msg)
|
||||
insert_download(cur, req_id, msg, existing_ids)
|
||||
conn.commit()
|
||||
safe_print(f" ✅ {len(messages)} new/updated messages")
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s",
|
||||
(req_id,)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
time.sleep(0.25)
|
||||
|
||||
conn.close()
|
||||
safe_print("\n🎉 Delta message sync DONE")
|
||||
|
||||
|
||||
# ==============================
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,246 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download all attachments for pozadavky where attachmentsProcessed IS NULL
|
||||
and (optionally) createdAt is newer than a cutoff date.
|
||||
Store them in MySQL table `medevio_downloads`, and update pozadavky.attachmentsProcessed.
|
||||
"""
|
||||
|
||||
import zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
|
||||
import sys
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
# Python < 3.7 fallback (not needed for you, but safe)
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str):
|
||||
enc = sys.stdout.encoding or ""
|
||||
if not enc or not enc.lower().startswith("utf"):
|
||||
# strip emoji + characters outside BMP
|
||||
text = ''.join(ch for ch in text if ord(ch) < 65536)
|
||||
|
||||
try:
|
||||
print(text)
|
||||
except UnicodeEncodeError:
|
||||
# ASCII fallback
|
||||
text = ''.join(ch for ch in text if ord(ch) < 128)
|
||||
print(text)
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
CREATED_AFTER = "2024-12-01" # optional filter
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ==============================
# 🧮 HELPERS
# ==============================
def extract_filename_from_url(url: str) -> str:
    """Return the last path segment of *url* without its query string."""
    try:
        return url.split("/")[-1].split("?")[0]
    except AttributeError:
        # BUGFIX: was a bare except; only a non-string url can fail here.
        return "unknown_filename"


def read_token(p: Path) -> str:
    """Read the API token from *p*, stripping an optional 'Bearer ' prefix."""
    tok = p.read_text(encoding="utf-8").strip()
    return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
||||
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH ATTACHMENTS
|
||||
# ==============================
|
||||
def fetch_attachments(headers, request_id):
|
||||
payload = {
|
||||
"operationName": "ClinicRequestDetail_GetPatientRequest2",
|
||||
"query": GRAPHQL_QUERY,
|
||||
"variables": {"requestId": request_id},
|
||||
}
|
||||
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
|
||||
if r.status_code != 200:
|
||||
safe_print(f"❌ HTTP {r.status_code} for request {request_id}")
|
||||
return []
|
||||
return r.json().get("data", {}).get("patientRequestMedicalRecords", [])
|
||||
|
||||
|
||||
# ==============================
# 💾 SAVE TO MYSQL
# ==============================
def insert_download(cur, req_id, a, m, created_date, existing_ids):
    """Download one attachment and upsert it into medevio_downloads.

    Returns True when a file was stored, False when skipped or failed.
    Adds the stored attachment id to *existing_ids* so later iterations
    skip it.
    """
    attachment_id = a.get("id")
    if attachment_id in existing_ids:
        safe_print(f" ⏭️ Already downloaded {attachment_id}")
        return False

    url = m.get("downloadUrl")
    if not url:
        safe_print(" ⚠️ Missing download URL")
        return False

    filename = extract_filename_from_url(url)

    try:
        r = requests.get(url, timeout=30)
        r.raise_for_status()
        content = r.content
    except Exception as e:
        # Best-effort: log and move on so one bad file doesn't stop the run.
        safe_print(f" ⚠️ Download failed {url}: {e}")
        return False

    file_size = len(content)
    attachment_type = a.get("attachmentType")
    content_type = m.get("contentType")

    cur.execute("""
        INSERT INTO medevio_downloads (
            request_id, attachment_id, attachment_type,
            filename, content_type, file_size,
            created_at, file_content
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            file_content = VALUES(file_content),
            file_size = VALUES(file_size),
            downloaded_at = NOW()
    """, (
        req_id,
        attachment_id,
        attachment_type,
        filename,
        content_type,
        file_size,
        created_date,
        content,
    ))

    existing_ids.add(attachment_id)
    # BUGFIX: the success message printed the literal "(unknown)" instead of
    # the actual file name.
    safe_print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
    return True
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
|
||||
token = read_token(TOKEN_PATH)
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
|
||||
# Load existing attachments
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT attachment_id FROM medevio_downloads")
|
||||
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
|
||||
|
||||
safe_print(f"✅ {len(existing_ids)} attachments already saved.")
|
||||
|
||||
# Build query for pozadavky
|
||||
sql = """
|
||||
SELECT id, pacient_prijmeni, pacient_jmeno, createdAt, updatedAt, attachmentsProcessed
|
||||
FROM pozadavky
|
||||
WHERE attachmentsProcessed IS NULL
|
||||
OR updatedAt > attachmentsProcessed
|
||||
"""
|
||||
params = []
|
||||
if CREATED_AFTER:
|
||||
sql += " AND createdAt >= %s"
|
||||
params.append(CREATED_AFTER)
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql, params)
|
||||
req_rows = cur.fetchall()
|
||||
|
||||
safe_print(f"📋 Found {len(req_rows)} pozadavky to process.")
|
||||
|
||||
# Process each pozadavek
|
||||
for i, row in enumerate(req_rows, 1):
|
||||
req_id = row["id"]
|
||||
prijmeni = row.get("pacient_prijmeni") or "Neznamy"
|
||||
jmeno = row.get("pacient_jmeno") or ""
|
||||
created_date = row.get("createdAt") or datetime.now()
|
||||
|
||||
safe_print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni}, {jmeno} ({req_id})")
|
||||
|
||||
attachments = fetch_attachments(headers, req_id)
|
||||
|
||||
if not attachments:
|
||||
safe_print(" ⚠️ No attachments found")
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
|
||||
conn.commit()
|
||||
continue
|
||||
|
||||
with conn.cursor() as cur:
|
||||
for a in attachments:
|
||||
m = a.get("medicalRecord") or {}
|
||||
insert_download(cur, req_id, a, m, created_date, existing_ids)
|
||||
conn.commit()
|
||||
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
|
||||
conn.commit()
|
||||
|
||||
safe_print(f" ✅ Done ({len(attachments)} attachments)")
|
||||
time.sleep(0.3)
|
||||
|
||||
conn.close()
|
||||
safe_print("\n🎯 All attachments processed.")
|
||||
|
||||
|
||||
# ==============================
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,252 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
|
||||
import sys
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
# Python < 3.7 fallback (not needed for you, but safe)
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str = ""):
|
||||
enc = sys.stdout.encoding or ""
|
||||
if not enc.lower().startswith("utf"):
|
||||
# Strip emoji and characters outside BMP for Task Scheduler
|
||||
text = ''.join(ch for ch in text if ord(ch) < 65536)
|
||||
try:
|
||||
print(text)
|
||||
except UnicodeEncodeError:
|
||||
# ASCII fallback
|
||||
text = ''.join(ch for ch in text if ord(ch) < 128)
|
||||
print(text)
|
||||
|
||||
|
||||
# ==============================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ==============================
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
|
||||
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Replace invalid filename characters with underscore."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()


def make_abbrev(title: str) -> str:
    """Abbreviate *title*: first letter of each word, digit runs kept whole,
    result upper-cased. Empty/None input yields ''."""
    if not title:
        return ""
    pieces = [
        word if word.isdigit() else word[0]
        for word in re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    ]
    return "".join(pieces).upper()
||||
|
||||
|
||||
# ==============================
|
||||
# 🧹 DELETE UNEXPECTED FILES
|
||||
# ==============================
|
||||
def clean_folder(folder: Path, valid_files: set):
    """Delete files in *folder* whose sanitized names are not in *valid_files*.

    Files whose name starts with the ▲ flag are never touched.
    Missing folders are ignored; individual delete failures are reported
    via safe_print but do not abort the scan.
    """
    if not folder.exists():
        return

    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        if entry.name.startswith("▲"):
            # Flagged files are deliberately preserved.
            continue
        if sanitize_name(entry.name) in valid_files:
            continue
        safe_print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            safe_print(f"⚠️ Could not delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 📦 DB CONNECTION
|
||||
# ==============================
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
|
||||
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
|
||||
cur_blob = conn.cursor()
|
||||
|
||||
safe_print("🔍 Loading metadata from DB (FAST)…")
|
||||
|
||||
cur_meta.execute("""
|
||||
SELECT d.id AS download_id,
|
||||
d.request_id,
|
||||
d.filename,
|
||||
d.created_at,
|
||||
p.updatedAt AS req_updated_at,
|
||||
p.pacient_jmeno AS jmeno,
|
||||
p.pacient_prijmeni AS prijmeni,
|
||||
p.displayTitle
|
||||
FROM medevio_downloads d
|
||||
JOIN pozadavky p ON d.request_id = p.id
|
||||
ORDER BY p.updatedAt DESC
|
||||
""")
|
||||
|
||||
rows = cur_meta.fetchall()
|
||||
safe_print(f"📋 Found {len(rows)} attachment records.\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN LOOP WITH PROGRESS
|
||||
# ==============================
|
||||
|
||||
unique_request_ids = []
|
||||
seen = set()
|
||||
for r in rows:
|
||||
req_id = r["request_id"]
|
||||
if req_id not in seen:
|
||||
unique_request_ids.append(req_id)
|
||||
seen.add(req_id)
|
||||
|
||||
total_requests = len(unique_request_ids)
|
||||
safe_print(f"🔄 Processing {total_requests} unique requests...\n")
|
||||
|
||||
processed_requests = set()
|
||||
current_index = 0
|
||||
|
||||
for r in rows:
|
||||
req_id = r["request_id"]
|
||||
|
||||
if req_id in processed_requests:
|
||||
continue
|
||||
processed_requests.add(req_id)
|
||||
|
||||
current_index += 1
|
||||
percent = (current_index / total_requests) * 100
|
||||
|
||||
safe_print(f"\n[ {percent:5.1f}% ] Processing request {current_index} / {total_requests} → {req_id}")
|
||||
|
||||
# ========== FETCH VALID FILENAMES ==========
|
||||
cur_meta.execute(
|
||||
"SELECT filename FROM medevio_downloads WHERE request_id=%s",
|
||||
(req_id,)
|
||||
)
|
||||
valid_files = {sanitize_name(row["filename"]) for row in cur_meta.fetchall()}
|
||||
|
||||
# ========== BUILD FOLDER NAME ==========
|
||||
updated_at = r["req_updated_at"] or datetime.now()
|
||||
date_str = updated_at.strftime("%Y-%m-%d")
|
||||
|
||||
prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
|
||||
jmeno = sanitize_name(r["jmeno"] or "")
|
||||
title = r.get("displayTitle") or ""
|
||||
abbr = make_abbrev(title)
|
||||
|
||||
clean_folder_name = sanitize_name(
|
||||
f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}"
|
||||
)
|
||||
|
||||
# ========== DETECT EXISTING FOLDER ==========
|
||||
existing_folder = None
|
||||
|
||||
for f in BASE_DIR.iterdir():
|
||||
if f.is_dir() and req_id in f.name:
|
||||
existing_folder = f
|
||||
break
|
||||
|
||||
main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
|
||||
|
||||
# ========== MERGE DUPLICATES ==========
|
||||
possible_dups = [
|
||||
f for f in BASE_DIR.iterdir()
|
||||
if f.is_dir() and req_id in f.name and f != main_folder
|
||||
]
|
||||
|
||||
for dup in possible_dups:
|
||||
safe_print(f"♻️ Merging duplicate folder: {dup.name}")
|
||||
|
||||
clean_folder(dup, valid_files)
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for f in dup.iterdir():
|
||||
if f.is_file():
|
||||
target = main_folder / f.name
|
||||
if not target.exists():
|
||||
f.rename(target)
|
||||
|
||||
shutil.rmtree(dup, ignore_errors=True)
|
||||
|
||||
# ========== CLEAN MAIN FOLDER ==========
|
||||
clean_folder(main_folder, valid_files)
|
||||
|
||||
# ========== DOWNLOAD MISSING FILES ==========
|
||||
added_new_file = False
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for filename in valid_files:
|
||||
dest_plain = main_folder / filename
|
||||
dest_marked = main_folder / ("▲" + filename)
|
||||
|
||||
if dest_plain.exists() or dest_marked.exists():
|
||||
continue
|
||||
|
||||
added_new_file = True
|
||||
|
||||
cur_blob.execute(
|
||||
"SELECT file_content FROM medevio_downloads "
|
||||
"WHERE request_id=%s AND filename=%s",
|
||||
(req_id, filename)
|
||||
)
|
||||
row = cur_blob.fetchone()
|
||||
if not row:
|
||||
continue
|
||||
|
||||
content = row[0]
|
||||
if not content:
|
||||
continue
|
||||
|
||||
with open(dest_plain, "wb") as f:
|
||||
f.write(content)
|
||||
|
||||
safe_print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
|
||||
|
||||
# ========== REMOVE ▲ FLAG IF NEW FILES ADDED ==========
|
||||
if added_new_file and "▲" in main_folder.name:
|
||||
new_name = main_folder.name.replace("▲", "").strip()
|
||||
new_path = main_folder.parent / new_name
|
||||
|
||||
if new_path != main_folder:
|
||||
try:
|
||||
main_folder.rename(new_path)
|
||||
safe_print(f"🔄 Folder flag ▲ removed → {new_name}")
|
||||
main_folder = new_path
|
||||
except Exception as e:
|
||||
safe_print(f"⚠️ Could not rename folder: {e}")
|
||||
|
||||
safe_print("\n🎯 Export complete.\n")
|
||||
|
||||
cur_blob.close()
|
||||
cur_meta.close()
|
||||
conn.close()
|
||||
@@ -0,0 +1,224 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from collections import defaultdict
|
||||
|
||||
# ==============================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ==============================
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
|
||||
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🔧 HELPERS
|
||||
# ==============================
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Return *name* with characters invalid on Windows filesystems replaced by '_'."""
    cleaned = re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name)
    return cleaned.strip()
|
||||
|
||||
|
||||
def make_abbrev(title: str) -> str:
    """Build an uppercase abbreviation of *title*.

    Alphanumeric words contribute their first character; purely numeric
    words are kept in full. An empty or falsy title yields "".
    """
    if not title:
        return ""
    tokens = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    return "".join(t if t.isdigit() else t[0] for t in tokens).upper()
|
||||
|
||||
|
||||
def clean_folder(folder: Path, valid_files: set):
    """Purge files not listed in *valid_files*; ▲-prefixed files are preserved."""
    if not folder.exists():
        return

    for entry in folder.iterdir():
        if not entry.is_file() or entry.name.startswith("▲"):
            continue
        if sanitize_name(entry.name) in valid_files:
            continue
        print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            print(f"⚠️ Could not delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 📦 DB CONNECTION
|
||||
# ==============================
|
||||
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
|
||||
cur_blob = conn.cursor()
|
||||
|
||||
print("🔍 Loading only requests with NEW attachments…")
|
||||
|
||||
cur_meta.execute("""
|
||||
SELECT
|
||||
p.id AS request_id,
|
||||
p.displayTitle,
|
||||
p.pacient_jmeno,
|
||||
p.pacient_prijmeni,
|
||||
p.updatedAt,
|
||||
p.attachmentsProcessed,
|
||||
d.filename,
|
||||
d.created_at
|
||||
FROM pozadavky p
|
||||
JOIN medevio_downloads d ON d.request_id = p.id
|
||||
LEFT JOIN (
|
||||
SELECT request_id, MAX(created_at) AS last_attachment_ts
|
||||
FROM medevio_downloads
|
||||
GROUP BY request_id
|
||||
) x ON x.request_id = p.id
|
||||
WHERE p.attachmentsProcessed IS NULL
|
||||
OR p.attachmentsProcessed < x.last_attachment_ts
|
||||
ORDER BY p.updatedAt DESC;
|
||||
""")
|
||||
|
||||
rows = cur_meta.fetchall()
|
||||
print(f"📋 Found {len(rows)} attachment rows belonging to requests needing processing.\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 PREPARE REQUEST GROUPING
|
||||
# ==============================
|
||||
|
||||
grouped = defaultdict(list)
|
||||
for r in rows:
|
||||
grouped[r["request_id"]].append(r)
|
||||
|
||||
unique_request_ids = list(grouped.keys())
|
||||
total_requests = len(unique_request_ids)
|
||||
|
||||
print(f"🔄 Processing {total_requests} requests needing updates…\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN LOOP
|
||||
# ==============================
|
||||
|
||||
index = 0
|
||||
|
||||
for req_id in unique_request_ids:
|
||||
index += 1
|
||||
pct = (index / total_requests) * 100
|
||||
|
||||
print(f"\n[ {pct:5.1f}% ] Processing request {index}/{total_requests} → {req_id}")
|
||||
|
||||
req_rows = grouped[req_id]
|
||||
first = req_rows[0]
|
||||
|
||||
# Build folder name
|
||||
updated_at = first["updatedAt"] or datetime.now()
|
||||
date_str = updated_at.strftime("%Y-%m-%d")
|
||||
|
||||
prijmeni = sanitize_name(first["pacient_prijmeni"] or "Unknown")
|
||||
jmeno = sanitize_name(first["pacient_jmeno"] or "")
|
||||
abbr = make_abbrev(first["displayTitle"])
|
||||
|
||||
desired_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")
|
||||
|
||||
# Detect existing folder for request
|
||||
main_folder = None
|
||||
for f in BASE_DIR.iterdir():
|
||||
if f.is_dir() and req_id in f.name:
|
||||
main_folder = f
|
||||
break
|
||||
|
||||
if not main_folder:
|
||||
main_folder = BASE_DIR / desired_folder_name
|
||||
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Build valid filename set
|
||||
valid_files = {sanitize_name(r["filename"]) for r in req_rows}
|
||||
|
||||
# Clean unexpected non-▲ files
|
||||
clean_folder(main_folder, valid_files)
|
||||
|
||||
# Track if ANY new files were downloaded
|
||||
added_new_file = False
|
||||
|
||||
# DOWNLOAD MISSING FILES
|
||||
for r in req_rows:
|
||||
filename = sanitize_name(r["filename"])
|
||||
dest_plain = main_folder / filename
|
||||
dest_flag = main_folder / ("▲" + filename)
|
||||
|
||||
# Skip if file already exists (plain or ▲)
|
||||
if dest_plain.exists() or dest_flag.exists():
|
||||
continue
|
||||
|
||||
# Fetch content
|
||||
cur_blob.execute("""
|
||||
SELECT file_content
|
||||
FROM medevio_downloads
|
||||
WHERE request_id=%s AND filename=%s
|
||||
""", (req_id, r["filename"]))
|
||||
|
||||
row = cur_blob.fetchone()
|
||||
if not row or not row[0]:
|
||||
continue
|
||||
|
||||
with open(dest_plain, "wb") as f:
|
||||
f.write(row[0])
|
||||
|
||||
print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
|
||||
added_new_file = True
|
||||
|
||||
# ------------------------------------
|
||||
# 🟦 FOLDER ▲ LOGIC (IMPORTANT)
|
||||
# ------------------------------------
|
||||
if added_new_file:
|
||||
# If folder contains ▲ in its name → remove it
|
||||
if "▲" in main_folder.name:
|
||||
new_name = main_folder.name.replace("▲", "").strip()
|
||||
new_path = main_folder.parent / new_name
|
||||
|
||||
try:
|
||||
main_folder.rename(new_path)
|
||||
print(f"🔄 Folder flag ▲ removed → {new_name}")
|
||||
main_folder = new_path
|
||||
except Exception as e:
|
||||
print(f"⚠️ Could not rename folder: {e}")
|
||||
else:
|
||||
# NO new files → NEVER rename folder
|
||||
pass
|
||||
|
||||
# Mark request as processed
|
||||
cur_meta.execute(
|
||||
"UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id=%s",
|
||||
(req_id,)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
# ==============================
|
||||
# 🏁 DONE
|
||||
# ==============================
|
||||
|
||||
print("\n🎯 Export complete.\n")
|
||||
cur_blob.close()
|
||||
cur_meta.close()
|
||||
conn.close()
|
||||
@@ -0,0 +1,193 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# ==============================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ==============================
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
|
||||
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Replace characters Windows forbids in filenames with underscores and trim whitespace."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
|
||||
|
||||
|
||||
def make_abbrev(title: str) -> str:
    """Abbreviate a displayTitle: word initials, digit groups intact, uppercased."""
    if not title:
        return ""
    result = []
    for word in re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title):
        result.append(word if word.isdigit() else word[0])
    return "".join(result).upper()
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧹 DELETE UNEXPECTED FILES
|
||||
# ==============================
|
||||
def clean_folder(folder: Path, valid_files: set):
    """Remove files whose sanitized names are missing from *valid_files*.

    ▲-flagged files are skipped; delete errors are logged and ignored.
    """
    if not folder.exists():
        return

    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        if entry.name.startswith("▲"):
            continue
        if sanitize_name(entry.name) not in valid_files:
            print(f"🗑️ Removing unexpected file: {entry.name}")
            try:
                entry.unlink()
            except Exception as e:
                print(f"⚠️ Could not delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 📦 DB CONNECTION
|
||||
# ==============================
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
|
||||
cur_blob = conn.cursor()
|
||||
|
||||
print("🔍 Loading ALL metadata without file_content…")
|
||||
|
||||
# ⭐ Load ALL metadata once (NO BLOBs)
|
||||
cur_meta.execute("""
|
||||
SELECT
|
||||
d.request_id,
|
||||
d.filename,
|
||||
d.created_at,
|
||||
p.updatedAt AS req_updated_at,
|
||||
p.pacient_jmeno AS jmeno,
|
||||
p.pacient_prijmeni AS prijmeni,
|
||||
p.displayTitle
|
||||
FROM medevio_downloads d
|
||||
JOIN pozadavky p ON d.request_id = p.id
|
||||
ORDER BY p.updatedAt DESC;
|
||||
""")
|
||||
|
||||
rows = cur_meta.fetchall()
|
||||
print(f"📋 Found {len(rows)} metadata rows.\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 PRE-GROUP METADATA
|
||||
# ==============================
|
||||
|
||||
# Build dictionary: request_id → all metadata rows for that request
|
||||
grouped = {}
|
||||
for row in rows:
|
||||
grouped.setdefault(row["request_id"], []).append(row)
|
||||
|
||||
unique_request_ids = list(grouped.keys())
|
||||
total_requests = len(unique_request_ids)
|
||||
|
||||
print(f"🔄 Processing {total_requests} unique requests…\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN LOOP
|
||||
# ==============================
|
||||
|
||||
for idx, req_id in enumerate(unique_request_ids, start=1):
|
||||
pct = (idx / total_requests) * 100
|
||||
req_rows = grouped[req_id]
|
||||
first = req_rows[0]
|
||||
|
||||
print(f"\n[ {pct:5.1f}% ] Processing request {idx}/{total_requests} → {req_id}")
|
||||
|
||||
# ======================
|
||||
# Build folder name
|
||||
# ======================
|
||||
updated_at = first["req_updated_at"] or datetime.now()
|
||||
date_str = updated_at.strftime("%Y-%m-%d")
|
||||
prijmeni = sanitize_name(first["prijmeni"] or "Unknown")
|
||||
jmeno = sanitize_name(first["jmeno"] or "")
|
||||
abbr = make_abbrev(first["displayTitle"] or "")
|
||||
|
||||
clean_folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}")
|
||||
|
||||
# Detect existing folder
|
||||
existing_folder = None
|
||||
for f in BASE_DIR.iterdir():
|
||||
if f.is_dir() and req_id in f.name:
|
||||
existing_folder = f
|
||||
break
|
||||
|
||||
main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# ======================
|
||||
# Valid files for this request
|
||||
# ======================
|
||||
valid_files = {sanitize_name(r["filename"]) for r in req_rows}
|
||||
|
||||
# Clean unexpected files
|
||||
clean_folder(main_folder, valid_files)
|
||||
|
||||
# ======================
|
||||
# DOWNLOAD MISSING FILES → only now load BLOBs
|
||||
# ======================
|
||||
added_new_file = False
|
||||
|
||||
for r in req_rows:
|
||||
filename = sanitize_name(r["filename"])
|
||||
dest_plain = main_folder / filename
|
||||
dest_marked = main_folder / ("▲" + filename)
|
||||
|
||||
if dest_plain.exists() or dest_marked.exists():
|
||||
continue
|
||||
|
||||
added_new_file = True
|
||||
|
||||
# ⭐ Load BLOB only when needed
|
||||
cur_blob.execute("""
|
||||
SELECT file_content
|
||||
FROM medevio_downloads
|
||||
WHERE request_id=%s AND filename=%s
|
||||
""", (req_id, r["filename"]))
|
||||
|
||||
row = cur_blob.fetchone()
|
||||
if not row or not row[0]:
|
||||
continue
|
||||
|
||||
with open(dest_plain, "wb") as f:
|
||||
f.write(row[0])
|
||||
|
||||
print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
|
||||
|
||||
# ======================
|
||||
# Folder-level ▲ logic
|
||||
# ======================
|
||||
if added_new_file and "▲" in main_folder.name:
|
||||
new_name = main_folder.name.replace("▲", "").strip()
|
||||
new_path = main_folder.parent / new_name
|
||||
|
||||
try:
|
||||
main_folder.rename(new_path)
|
||||
main_folder = new_path
|
||||
print(f"🔄 Folder flag ▲ removed → {new_name}")
|
||||
except Exception as e:
|
||||
print(f"⚠️ Could not rename folder: {e}")
|
||||
|
||||
cur_blob.close()
|
||||
cur_meta.close()
|
||||
conn.close()
|
||||
|
||||
print("\n🎯 Export complete.\n")
|
||||
@@ -0,0 +1,146 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# ==============================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ==============================
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3307,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
|
||||
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Strip whitespace and replace filesystem-invalid characters with '_'."""
    safe = re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name)
    return safe.strip()
|
||||
|
||||
|
||||
def clean_folder(folder: Path, valid_files: set):
    """Delete files not backed by a MySQL row for this request.

    NOTE: unlike the other clean_folder variants in this file, this one
    has no ▲-prefix exemption — flagged files are deleted too.
    """
    if not folder.exists():
        return

    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        if sanitize_name(entry.name) in valid_files:
            continue
        print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            print(f"⚠️ Cannot delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 📥 LOAD EVERYTHING IN ONE QUERY
|
||||
# ==============================
|
||||
conn = pymysql.connect(**DB_CONFIG)
|
||||
cur = conn.cursor(pymysql.cursors.DictCursor)
|
||||
|
||||
print("📥 Loading ALL metadata + BLOBs with ONE MySQL query…")
|
||||
|
||||
cur.execute("""
|
||||
SELECT
|
||||
d.id AS download_id,
|
||||
d.request_id,
|
||||
d.filename,
|
||||
d.file_content,
|
||||
p.updatedAt AS req_updated_at,
|
||||
p.pacient_jmeno AS jmeno,
|
||||
p.pacient_prijmeni AS prijmeni
|
||||
FROM medevio_downloads d
|
||||
JOIN pozadavky p ON d.request_id = p.id
|
||||
ORDER BY p.updatedAt DESC, d.created_at ASC
|
||||
""")
|
||||
|
||||
rows = cur.fetchall()
|
||||
print(f"📦 Loaded {len(rows)} total file rows.\n")
|
||||
|
||||
conn.close()
|
||||
|
||||
# ==============================
|
||||
# 🔄 ORGANIZE ROWS PER REQUEST
|
||||
# ==============================
|
||||
requests = {} # req_id → list of file dicts
|
||||
|
||||
for r in rows:
|
||||
req_id = r["request_id"]
|
||||
if req_id not in requests:
|
||||
requests[req_id] = []
|
||||
requests[req_id].append(r)
|
||||
|
||||
print(f"📌 Unique requests: {len(requests)}\n")
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN LOOP – SAME LOGIC AS BEFORE
|
||||
# ==============================
|
||||
for req_id, filelist in requests.items():
|
||||
|
||||
# ========== GET UPDATEDAT (same logic) ==========
|
||||
any_row = filelist[0]
|
||||
updated_at = any_row["req_updated_at"] or datetime.now()
|
||||
date_str = updated_at.strftime("%Y-%m-%d")
|
||||
|
||||
prijmeni = sanitize_name(any_row["prijmeni"] or "Unknown")
|
||||
jmeno = sanitize_name(any_row["jmeno"] or "")
|
||||
|
||||
folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} {req_id}")
|
||||
main_folder = BASE_DIR / folder_name
|
||||
|
||||
# ========== VALID FILES ==========
|
||||
valid_files = {sanitize_name(r["filename"]) for r in filelist}
|
||||
|
||||
# ========== FIND OLD FOLDERS ==========
|
||||
possible_dups = [
|
||||
f for f in BASE_DIR.iterdir()
|
||||
if f.is_dir() and req_id in f.name and f != main_folder
|
||||
]
|
||||
|
||||
# ========== MERGE OLD FOLDERS ==========
|
||||
for dup in possible_dups:
|
||||
print(f"♻️ Merging folder: {dup.name}")
|
||||
|
||||
clean_folder(dup, valid_files)
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for f in dup.iterdir():
|
||||
if f.is_file():
|
||||
target = main_folder / f.name
|
||||
if not target.exists():
|
||||
f.rename(target)
|
||||
|
||||
shutil.rmtree(dup, ignore_errors=True)
|
||||
|
||||
# ========== CLEAN MAIN FOLDER ==========
|
||||
main_folder.mkdir(parents=True, exist_ok=True)
|
||||
clean_folder(main_folder, valid_files)
|
||||
|
||||
# ========== SAVE FILES (fast now) ==========
|
||||
for r in filelist:
|
||||
filename = sanitize_name(r["filename"])
|
||||
dest = main_folder / filename
|
||||
|
||||
if dest.exists():
|
||||
continue
|
||||
|
||||
content = r["file_content"]
|
||||
if not content:
|
||||
continue
|
||||
|
||||
with open(dest, "wb") as f:
|
||||
f.write(content)
|
||||
|
||||
print(f"💾 Saved: {dest.relative_to(BASE_DIR)}")
|
||||
|
||||
print("\n🎯 Export complete.\n")
|
||||
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import importlib.util
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Load FunctionsLoader
|
||||
FUNCTIONS_LOADER_PATH = Path(r"C:\Reporting\Functions\FunctionsLoader.py")
|
||||
spec = importlib.util.spec_from_file_location("FunctionsLoader", FUNCTIONS_LOADER_PATH)
|
||||
FunctionsLoader = importlib.util.module_from_spec(spec)
|
||||
sys.modules["FunctionsLoader"] = FunctionsLoader
|
||||
spec.loader.exec_module(FunctionsLoader)
|
||||
|
||||
"""
|
||||
Spustí všechny PRAVIDELNÉ skripty v daném pořadí:
|
||||
|
||||
0) PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
|
||||
1) PRAVIDELNE_1_ReadLast300DonePozadavku.py
|
||||
2) PRAVIDELNE_2_ReadPoznamky.py
|
||||
3) PRAVIDELNE_3_StahniKomunikaci.py
|
||||
4) PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
|
||||
5) PRAVIDELNE_5_SaveToFileSystem incremental.py
|
||||
"""
|
||||
|
||||
import time, socket
|
||||
for _ in range(30):
|
||||
try:
|
||||
socket.create_connection(("192.168.1.76", 3307), timeout=3).close()
|
||||
break
|
||||
except OSError:
|
||||
time.sleep(10)
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# složka, kde leží tento skript i všechny PRAVIDELNE_*.py
|
||||
BASE_DIR = Path(__file__).resolve().parent
|
||||
|
||||
SCRIPTS_IN_ORDER = [
|
||||
"PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py",
|
||||
"PRAVIDELNE_1_ReadLast300DonePozadavku.py",
|
||||
"PRAVIDELNE_2_ReadPoznamky.py",
|
||||
"PRAVIDELNE_3_StahniKomunikaci.py",
|
||||
"PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py",
|
||||
"PRAVIDELNE_5_SaveToFileSystem incremental.py", # má mezeru v názvu, ale v listu je to OK
|
||||
]
|
||||
|
||||
LOG_FILE = BASE_DIR / "PRAVIDELNE_log.txt"
|
||||
|
||||
|
||||
def log(msg: str):
    """Write a timestamped message to the console and append it to the log file."""
    stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    entry = f"[{stamp}] {msg}"
    print(entry)
    try:
        with LOG_FILE.open("a", encoding="utf-8") as fh:
            fh.write(entry + "\n")
    except Exception:
        # A failing log write must never abort the run.
        pass
|
||||
|
||||
|
||||
def main():
    """Run every script in SCRIPTS_IN_ORDER sequentially, logging each result."""
    log("=== START pravidelného běhu ===")

    for script_name in SCRIPTS_IN_ORDER:
        script_path = BASE_DIR / script_name

        if not script_path.exists():
            log(f"❌ Skript nenalezen: {script_path}")
            continue

        log(f"▶ Spouštím: {script_path.name}")

        # Launch with the same interpreter that runs this orchestrator.
        try:
            result = subprocess.run(
                [sys.executable, str(script_path)],
                cwd=str(BASE_DIR),
                capture_output=True,
                text=True,
                encoding="utf-8",
                errors="ignore",  # tolerate bytes invalid under the console codepage
            )
        except Exception as e:
            log(f"  💥 Chyba při spouštění {script_path.name}: {e}")
            continue

        # Report the child's exit status.
        log(f"  ↳ return code: {result.returncode}")

        # Surface anything the child wrote to stderr.
        if result.stderr:
            log(f"  ⚠ stderr {script_path.name}:\n{result.stderr.strip()}")

        # stdout is intentionally not logged — it can be very verbose.

    log("=== KONEC pravidelného běhu ===\n")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,29 @@
|
||||
[2025-12-01 06:37:41] === START pravidelného běhu ===
|
||||
[2025-12-01 06:37:42] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
|
||||
[2025-12-01 06:37:44] ↳ PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py return code: 0
|
||||
[2025-12-01 06:37:44] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
|
||||
[2025-12-01 06:37:48] ↳ PRAVIDELNE_1_ReadLast300DonePozadavku.py return code: 0
|
||||
[2025-12-01 06:37:48] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
|
||||
[2025-12-01 06:37:49] ↳ PRAVIDELNE_2_ReadPoznamky.py return code: 0
|
||||
[2025-12-01 06:37:50] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
|
||||
[2025-12-01 06:37:51] ↳ PRAVIDELNE_3_StahniKomunikaci.py return code: 0
|
||||
[2025-12-01 06:37:52] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
|
||||
[2025-12-01 06:37:53] ↳ PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py return code: 0
|
||||
[2025-12-01 06:37:53] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
|
||||
[2025-12-01 06:38:42] ↳ PRAVIDELNE_5_SaveToFileSystem incremental.py return code: 0
|
||||
[2025-12-01 06:38:43] === KONEC pravidelného běhu ===
|
||||
[2025-12-02 07:04:34] === START pravidelného běhu ===
|
||||
[2025-12-02 07:04:34] ▶ Spouštím: PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py
|
||||
[2025-12-02 07:04:35] ↳ return code: 0
|
||||
[2025-12-02 07:04:35] ▶ Spouštím: PRAVIDELNE_1_ReadLast300DonePozadavku.py
|
||||
[2025-12-02 07:04:39] ↳ return code: 0
|
||||
[2025-12-02 07:04:39] ▶ Spouštím: PRAVIDELNE_2_ReadPoznamky.py
|
||||
[2025-12-02 07:04:40] ↳ return code: 0
|
||||
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_3_StahniKomunikaci.py
|
||||
[2025-12-02 07:04:40] ↳ return code: 0
|
||||
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py
|
||||
[2025-12-02 07:04:40] ↳ return code: 0
|
||||
[2025-12-02 07:04:40] ▶ Spouštím: PRAVIDELNE_5_SaveToFileSystem incremental.py
|
||||
[2025-12-02 07:05:28] ↳ return code: 0
|
||||
[2025-12-02 07:05:28] === KONEC pravidelného běhu ===
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Orchestrator for all PRAVIDELNE scripts in exact order.
|
||||
"""
|
||||
|
||||
import time, socket
|
||||
for _ in range(30):
|
||||
try:
|
||||
socket.create_connection(("192.168.1.76", 3307), timeout=3).close()
|
||||
break
|
||||
except OSError:
|
||||
time.sleep(10)
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# =====================================================================
|
||||
# Import EXACT Functions.py from: C:\Reporting\Fio\Functions.py
|
||||
# This bypasses all other Functions.py files in the system.
|
||||
# =====================================================================
|
||||
|
||||
import importlib.util
|
||||
|
||||
FUNCTIONS_FILE = Path(r"C:\Reporting\Fio\Functions.py")
|
||||
|
||||
spec = importlib.util.spec_from_file_location("Functions_FIO", FUNCTIONS_FILE)
|
||||
Functions_FIO = importlib.util.module_from_spec(spec)
|
||||
sys.modules["Functions_FIO"] = Functions_FIO
|
||||
spec.loader.exec_module(Functions_FIO)
|
||||
|
||||
# correct WhatsApp function
|
||||
SendWhatsAppMessage = Functions_FIO.SendWhatsAppMessage
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# General Orchestrator Settings
|
||||
# =====================================================================
|
||||
|
||||
# folder where orchestrator + sub-scripts live
|
||||
BASE_DIR = Path(__file__).resolve().parent
|
||||
|
||||
SCRIPTS_IN_ORDER = [
|
||||
"PRAVIDELNE_0_READ_ALL_ACTIVE_POZADAVKY.py",
|
||||
"PRAVIDELNE_1_ReadLast300DonePozadavku.py",
|
||||
"PRAVIDELNE_2_ReadPoznamky.py",
|
||||
"PRAVIDELNE_3_StahniKomunikaci.py",
|
||||
"PRAVIDELNE_4_StahniPrilohyUlozDoMySQL.py",
|
||||
"PRAVIDELNE_5_SaveToFileSystem incremental.py",
|
||||
]
|
||||
|
||||
LOG_FILE = BASE_DIR / "PRAVIDELNE_log.txt"
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# Logging + WhatsApp wrappers
|
||||
# =====================================================================
|
||||
|
||||
def log(msg: str):
    """Print a timestamped message and append it to LOG_FILE.

    Write failures are deliberately swallowed so logging can never
    abort the orchestrator run.
    """
    ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    line = f"[{ts}] {msg}"
    print(line)
    try:
        with LOG_FILE.open("a", encoding="utf-8") as f:
            f.write(line + "\n")
    except Exception:
        # Fix: the original bare `except:` also caught SystemExit and
        # KeyboardInterrupt, making the process hard to interrupt.
        pass
|
||||
|
||||
|
||||
def whatsapp_notify(text: str):
    """Send a WhatsApp message; failures are swallowed so the orchestrator never crashes."""
    try:
        SendWhatsAppMessage(text)
    except Exception:
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt, so Ctrl+C during a send could be silently eaten.
        pass
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# Main orchestrator
|
||||
# =====================================================================
|
||||
|
||||
def main():
    """Run each PRAVIDELNE script in order, logging and WhatsApp-notifying progress."""
    log("=== START pravidelného běhu ===")
    whatsapp_notify("🏁 *PRAVIDELNÉ skripty: START*")

    for name in SCRIPTS_IN_ORDER:
        path = BASE_DIR / name

        if not path.exists():
            missing = f"❌ Skript nenalezen: {path}"
            log(missing)
            whatsapp_notify(missing)
            continue

        log(f"▶ Spouštím: {path.name}")
        whatsapp_notify(f"▶ *Spouštím:* {path.name}")

        # Run the child with the orchestrator's own interpreter.
        try:
            completed = subprocess.run(
                [sys.executable, str(path)],
                cwd=str(BASE_DIR),
                capture_output=True,
                text=True,
                encoding="utf-8",
                errors="ignore",  # tolerate undecodable output bytes
            )
        except Exception as e:
            failure = f"💥 Chyba při spouštění {path.name}: {e}"
            log(failure)
            whatsapp_notify(failure)
            continue

        # Exit status of the child process.
        status = f"↳ {path.name} return code: {completed.returncode}"
        log(status)
        whatsapp_notify(status)

        # Forward any stderr output (warnings / errors).
        if completed.stderr:
            stderr_note = f"⚠ stderr v {path.name}:\n{completed.stderr.strip()}"
            log(stderr_note)
            whatsapp_notify(stderr_note)

    log("=== KONEC pravidelného běhu ===")
    whatsapp_notify("✅ *PRAVIDELNÉ skripty: KONEC*\n")
|
||||
|
||||
|
||||
# =====================================================================
|
||||
# Entry point
|
||||
# =====================================================================
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,196 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download all 'Odeslat lékařskou zprávu' attachments from Medevio API
|
||||
and store them (including binary content) directly into MySQL table `medevio_downloads`.
|
||||
|
||||
Each attachment (PDF, image, etc.) is fetched once and saved as LONGBLOB.
|
||||
Duplicate protection is ensured via UNIQUE KEY on `attachment_id`.
|
||||
"""
|
||||
|
||||
import json
import os
import time
import zlib
from datetime import datetime
from pathlib import Path

import pymysql
import requests
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION
|
||||
# ==============================
|
||||
# Token lives one directory above this script (shared by sibling tools).
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
CLINIC_SLUG = "mudr-buzalkova"

# SECURITY: database credentials were hard-coded here.  Each value can now
# be overridden via MEDEVIO_DB_* environment variables; the previous
# literals remain as fallbacks so existing deployments keep working.
# TODO(review): remove the hard-coded password fallback and rotate the
# credential once the environment variable is provisioned everywhere.
DB_CONFIG = {
    "host": os.environ.get("MEDEVIO_DB_HOST", "192.168.1.76"),
    "port": int(os.environ.get("MEDEVIO_DB_PORT", "3307")),
    "user": os.environ.get("MEDEVIO_DB_USER", "root"),
    "password": os.environ.get("MEDEVIO_DB_PASSWORD", "Vlado9674+"),
    "database": os.environ.get("MEDEVIO_DB_NAME", "medevio"),
    "charset": "utf8mb4",
    # DictCursor: rows come back as dicts keyed by column name.
    "cursorclass": pymysql.cursors.DictCursor,
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def short_crc8(uuid_str: str) -> str:
    """Return a deterministic 8-char lowercase-hex digest of *uuid_str*,
    computed from its CRC32 checksum."""
    checksum = zlib.crc32(uuid_str.encode("utf-8")) & 0xFFFFFFFF
    return format(checksum, "08x")
|
||||
|
||||
def extract_filename_from_url(url: str) -> str:
    """Return the file name embedded in an S3-style URL: the segment after
    the last '/', with any query string ('?...') dropped.

    Falls back to a placeholder when *url* is not string-like.
    """
    try:
        tail = url.rpartition("/")[2]
        return tail.partition("?")[0]
    except Exception:
        return "unknown_filename"
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the API token from file *p*, stripping surrounding whitespace
    and an optional leading 'Bearer ' prefix."""
    raw = p.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    if raw.startswith(prefix):
        raw = raw[len(prefix):]
    return raw
|
||||
|
||||
# ==============================
|
||||
# 📡 FETCH ATTACHMENTS
|
||||
# ==============================
|
||||
def fetch_attachments(headers, request_id):
    """Query the Medevio GraphQL API for all attachments of one request.

    Parameters
    ----------
    headers : dict with the Authorization/Content-Type headers.
    request_id : patient-request UUID string.

    Returns a (possibly empty) list of attachment dicts.  API-level
    problems (non-200 status, non-JSON body, GraphQL error payload) are
    reported to stdout and yield [] instead of raising, matching the
    caller's "skip and continue" handling.
    """
    payload = {
        "operationName": "ClinicRequestDetail_GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": {"requestId": request_id},
    }
    r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
    if r.status_code != 200:
        print(f"❌ HTTP {r.status_code} for request {request_id}")
        return []
    # BUG FIX: r.json() raises on a non-JSON body, and a GraphQL error
    # response can carry "data": null — the old `.get("data", {})` then
    # returned None and crashed on the chained .get().  Both cases now
    # degrade to an empty list.
    try:
        body = r.json()
    except ValueError:
        print(f"❌ Invalid JSON response for request {request_id}")
        return []
    data = (body.get("data") or {}).get("patientRequestMedicalRecords") or []
    return data
|
||||
|
||||
# ==============================
|
||||
# 💾 SAVE TO MYSQL
|
||||
# ==============================
|
||||
def insert_download(cur, req_id, a, m, jmeno, prijmeni, created_date):
    """Download one attachment and upsert it into `medevio_downloads`.

    Parameters
    ----------
    cur : open DB cursor (caller commits).
    req_id : patient-request UUID the attachment belongs to.
    a : attachment dict — provides `id` and `attachmentType`.
    m : medicalRecord dict — provides `downloadUrl` and `contentType`.
    jmeno, prijmeni : patient first/last name, denormalized into the row.
    created_date : request creation datetime (may be None).

    A missing URL or a failed download is reported and skipped so that one
    bad attachment cannot abort the whole batch.
    """
    url = m.get("downloadUrl")
    if not url:
        print(" ⚠️ No download URL")
        return

    try:
        r = requests.get(url, timeout=45)
        r.raise_for_status()
        content = r.content
    except Exception as e:
        print(f" ⚠️ Failed to download {url}: {e}")
        return

    file_size = len(content)
    filename = extract_filename_from_url(url)
    attachment_id = a.get("id")
    attachment_type = a.get("attachmentType")
    content_type = m.get("contentType")

    # UNIQUE KEY on attachment_id makes this an idempotent upsert: a re-run
    # refreshes the blob and timestamp instead of inserting a duplicate.
    cur.execute("""
        INSERT INTO medevio_downloads (
            request_id, attachment_id, attachment_type, filename,
            content_type, file_size, pacient_jmeno, pacient_prijmeni,
            created_at, file_content
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            file_content = VALUES(file_content),
            file_size = VALUES(file_size),
            downloaded_at = NOW()
    """, (
        req_id,
        attachment_id,
        attachment_type,
        filename,
        content_type,
        file_size,
        jmeno,
        prijmeni,
        created_date,
        content
    ))
    # BUG FIX: previously printed the literal "(unknown)" instead of the
    # actual file name that was just stored.
    print(f" 💾 Saved {filename} ({file_size/1024:.1f} kB)")
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Fetch every 'Odeslat lékařskou zprávu' request from the local DB,
    pull its attachments from the Medevio API, and store them as blobs in
    `medevio_downloads`."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    # Collect the target requests up front, then process them one by one.
    with conn.cursor() as cur:
        cur.execute("""
            SELECT id, displayTitle, pacient_prijmeni, pacient_jmeno, createdAt
            FROM pozadavky
            WHERE displayTitle = 'Odeslat lékařskou zprávu'
        """)
        rows = cur.fetchall()

    print(f"📋 Found {len(rows)} 'Odeslat lékařskou zprávu' requests")

    total = len(rows)
    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        jmeno = row.get("pacient_jmeno") or ""
        raw_created = row.get("createdAt")

        # createdAt arrives as an opaque value; parse its str() form and
        # fall back to None when it does not match the expected format.
        try:
            created_date = datetime.strptime(str(raw_created), "%Y-%m-%d %H:%M:%S")
        except Exception:
            created_date = None

        print(f"\n[{i}/{total}] 🧾 {prijmeni}, {jmeno} ({req_id})")

        attachments = fetch_attachments(headers, req_id)
        if not attachments:
            print(" ⚠️ No attachments")
            continue

        # One cursor + commit per request keeps each request atomic.
        with conn.cursor() as cur:
            for attachment in attachments:
                record = attachment.get("medicalRecord") or {}
                insert_download(cur, req_id, attachment, record,
                                jmeno, prijmeni, created_date)
            conn.commit()

        print(f" ✅ {len(attachments)} attachments saved for {prijmeni}, {jmeno}")
        time.sleep(0.5)  # be nice to the API

    conn.close()
    print("\n✅ Done! All attachments stored in MySQL table `medevio_downloads`.")


# ==============================
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user