notebookvb
This commit is contained in:
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
import time
|
||||
from dateutil import parser
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
# ================================
|
||||
# 🔧 CONFIGURATION
|
||||
# ================================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
BATCH_SIZE = 100
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
# ⭐ NOVÝ TESTOVANÝ DOTAZ – obsahuje lastMessage.createdAt
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestList2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$state: PatientRequestState,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
state: $state,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
doneAt
|
||||
removedAt
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
lastMessage {
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
# ================================
|
||||
# 🧿 SAFE DATETIME PARSER (ALWAYS UTC → LOCAL)
|
||||
# ================================
|
||||
def to_mysql_dt_utc(iso_str):
    """Convert an ISO-8601 timestamp to a local-time MySQL DATETIME string.

    Naive timestamps are assumed to be UTC before conversion to the local
    zone. Returns None for empty or unparseable input.
    """
    if not iso_str:
        return None
    try:
        dt = parser.isoparse(iso_str)
    except (ValueError, TypeError):
        # Was a bare `except:` — that also swallowed SystemExit/KeyboardInterrupt.
        return None
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
# ================================
|
||||
# 🔑 TOKEN
|
||||
# ================================
|
||||
def read_token(path: Path) -> str:
    """Read the API token from *path*, dropping an optional "Bearer " prefix."""
    raw = path.read_text(encoding="utf-8").strip()
    prefix = "Bearer "
    return raw[len(prefix):] if raw.startswith(prefix) else raw
|
||||
|
||||
# ================================
|
||||
# 💾 UPSERT
|
||||
# ================================
|
||||
def upsert(conn, r):
    """Insert or update one patient request row in `pozadavky`.

    The stored updatedAt is the later of the request's own updatedAt and the
    last message's createdAt (plain string comparison is safe here because
    both are "%Y-%m-%d %H:%M:%S" formatted).
    """
    patient = r.get("extendedPatient") or {}
    last_message = r.get("lastMessage") or {}

    timestamps = [
        to_mysql_dt_utc(r.get("updatedAt")),
        to_mysql_dt_utc(last_message.get("createdAt")),
    ]
    present = [t for t in timestamps if t]
    final_updated = max(present) if present else None

    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """

    row = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt_utc(r.get("createdAt")),
        final_updated,
        to_mysql_dt_utc(r.get("doneAt")),
        to_mysql_dt_utc(r.get("removedAt")),
        patient.get("name"),
        patient.get("surname"),
        patient.get("identificationNumber"),
    )

    with conn.cursor() as cur:
        cur.execute(sql, row)
    conn.commit()
|
||||
|
||||
# ================================
|
||||
# 📡 FETCH ACTIVE PAGE
|
||||
# ================================
|
||||
def fetch_active(headers, offset):
    """Fetch one page of ACTIVE patient requests from the Medevio GraphQL API.

    Returns a tuple (requests_list, total_count). Raises requests.HTTPError
    on a non-2xx response.
    """
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
        "state": "ACTIVE",
    }

    payload = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    # timeout added: without it a stalled connection hangs the sync forever
    r = requests.post("https://api.medevio.cz/graphql", json=payload,
                      headers=headers, timeout=30)
    r.raise_for_status()
    data = r.json().get("data", {}).get("requestsResponse", {})
    return data.get("patientRequests", []), data.get("count", 0)
|
||||
|
||||
# ================================
|
||||
# 🧠 MAIN
|
||||
# ================================
|
||||
def main():
    """Page through all ACTIVE requests and upsert each into MySQL."""
    token = read_token(TOKEN_PATH)
    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    print(f"\n=== Sync ACTIVE požadavků @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    offset, total_processed, total_count = 0, 0, None
    while True:
        batch, count = fetch_active(headers, offset)
        if total_count is None:
            total_count = count
            print(f"📡 Celkem ACTIVE v Medevio: {count}")

        if not batch:
            break

        for record in batch:
            upsert(conn, record)

        total_processed += len(batch)
        print(f" • {total_processed}/{total_count} ACTIVE processed")

        if offset + BATCH_SIZE >= count:
            break
        offset += BATCH_SIZE
        time.sleep(0.4)

    conn.close()
    print("\n✅ ACTIVE sync hotovo!\n")
|
||||
@@ -0,0 +1,239 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pymysql
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from dateutil import parser
|
||||
import time
|
||||
import sys
|
||||
|
||||
# ================================
|
||||
# UTF-8 SAFE OUTPUT (Windows friendly)
|
||||
# ================================
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
def safe_print(text: str):
    """Print *text*, degrading gracefully on non-UTF-8 consoles.

    When stdout is not UTF-8, characters outside the BMP (emoji) are
    stripped first; on a residual UnicodeEncodeError the text falls back
    to plain ASCII.
    """
    encoding = sys.stdout.encoding or ""
    if not encoding.lower().startswith("utf"):
        text = "".join(c for c in text if ord(c) < 0x10000)
    try:
        print(text)
    except UnicodeEncodeError:
        print("".join(c for c in text if ord(c) < 0x80))
|
||||
|
||||
|
||||
# ================================
|
||||
# 🔧 CONFIG
|
||||
# ================================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
BATCH_SIZE = 500
|
||||
STATES = ["ACTIVE", "DONE"] # explicitně – jinak API vrací jen ACTIVE
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestList2(
|
||||
$clinicSlug: String!,
|
||||
$queueId: String,
|
||||
$queueAssignment: QueueAssignmentFilter!,
|
||||
$state: PatientRequestState,
|
||||
$pageInfo: PageInfo!,
|
||||
$locale: Locale!
|
||||
) {
|
||||
requestsResponse: listPatientRequestsForClinic2(
|
||||
clinicSlug: $clinicSlug,
|
||||
queueId: $queueId,
|
||||
queueAssignment: $queueAssignment,
|
||||
state: $state,
|
||||
pageInfo: $pageInfo
|
||||
) {
|
||||
count
|
||||
patientRequests {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
doneAt
|
||||
removedAt
|
||||
extendedPatient {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
lastMessage {
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
# ================================
|
||||
# TOKEN
|
||||
# ================================
|
||||
def read_token(path: Path) -> str:
    """Return the API token stored at *path*, without any "Bearer " prefix."""
    token = path.read_text(encoding="utf-8").strip()
    if not token.startswith("Bearer "):
        return token
    _, remainder = token.split(" ", 1)
    return remainder
|
||||
|
||||
|
||||
# ================================
|
||||
# DATETIME PARSER
|
||||
# ================================
|
||||
def to_mysql_dt(iso_str):
    """Parse an ISO timestamp into a local-time MySQL DATETIME string.

    Naive timestamps are assumed to already be in the machine's local zone
    (NOTE(review): this differs from the ACTIVE-sync script, which assumes
    UTC — confirm which is intended). Returns None on empty/invalid input.
    """
    if not iso_str:
        return None
    try:
        parsed = parser.isoparse(iso_str)
        if parsed.tzinfo is None:
            local_tz = datetime.now().astimezone().tzinfo
            parsed = parsed.replace(tzinfo=local_tz)
        return parsed.astimezone().strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return None
|
||||
|
||||
|
||||
# ================================
|
||||
# UPSERT
|
||||
# ================================
|
||||
def upsert(conn, r):
    """Insert or refresh one request row in `pozadavky`.

    updatedAt is taken as the newest of the request's updatedAt and the
    last message's createdAt (string max works on the fixed datetime format).
    """
    patient = r.get("extendedPatient") or {}
    last_msg = r.get("lastMessage") or {}

    api_updated = to_mysql_dt(r.get("updatedAt"))
    msg_updated = to_mysql_dt(last_msg.get("createdAt"))
    final_updated = max(
        (t for t in (api_updated, msg_updated) if t),
        default=None,
    )

    sql = """
        INSERT INTO pozadavky (
            id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
            pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            displayTitle=VALUES(displayTitle),
            updatedAt=VALUES(updatedAt),
            doneAt=VALUES(doneAt),
            removedAt=VALUES(removedAt),
            pacient_jmeno=VALUES(pacient_jmeno),
            pacient_prijmeni=VALUES(pacient_prijmeni),
            pacient_rodnecislo=VALUES(pacient_rodnecislo)
    """

    values = (
        r.get("id"),
        r.get("displayTitle"),
        to_mysql_dt(r.get("createdAt")),
        final_updated,
        to_mysql_dt(r.get("doneAt")),
        to_mysql_dt(r.get("removedAt")),
        patient.get("name"),
        patient.get("surname"),
        patient.get("identificationNumber"),
    )

    with conn.cursor() as cur:
        cur.execute(sql, values)
    conn.commit()
|
||||
|
||||
|
||||
# ================================
|
||||
# FETCH PAGE (per state)
|
||||
# ================================
|
||||
def fetch_state(headers, state, offset):
    """Fetch one page of patient requests in the given *state*.

    Returns (requests_list, total_count). Raises requests.HTTPError on a
    non-2xx response and KeyError if the GraphQL payload lacks `data`
    (e.g. an errors-only response), so failures are loud, not silent.
    """
    variables = {
        "clinicSlug": CLINIC_SLUG,
        "queueId": None,
        "queueAssignment": "ANY",
        "state": state,
        "pageInfo": {"first": BATCH_SIZE, "offset": offset},
        "locale": "cs",
    }

    payload = {
        "operationName": "ClinicRequestList2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    # timeout added: without it a stalled connection hangs the sync forever
    r = requests.post("https://api.medevio.cz/graphql", json=payload,
                      headers=headers, timeout=30)
    r.raise_for_status()

    data = r.json()["data"]["requestsResponse"]
    return data.get("patientRequests", []), data.get("count", 0)
|
||||
|
||||
|
||||
# ================================
|
||||
# MAIN
|
||||
# ================================
|
||||
def main():
    """Full read-all sync: page through every state in STATES and upsert rows."""
    headers = {
        "Authorization": f"Bearer {read_token(TOKEN_PATH)}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)
    safe_print(f"\n=== FULL Medevio READ-ALL sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")

    grand_total = 0
    for state in STATES:
        safe_print(f"\n🔁 STATE = {state}")
        offset, processed, total = 0, 0, None

        while True:
            batch, count = fetch_state(headers, state, offset)

            if total is None:
                total = count
                safe_print(f"📡 {state}: celkem {total}")

            if not batch:
                break

            for record in batch:
                upsert(conn, record)

            processed += len(batch)
            safe_print(f" • {processed}/{total}")

            offset += BATCH_SIZE
            if offset >= count:
                break
            time.sleep(0.4)

        grand_total += processed

    conn.close()
    safe_print(f"\n✅ HOTOVO – celkem zpracováno {grand_total} požadavků\n")
|
||||
@@ -0,0 +1,217 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download and store Medevio questionnaires (userNote + eCRF) for all patient requests.
|
||||
Uses the verified working query "GetPatientRequest2".
|
||||
"""
|
||||
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str):
    """Console-safe print: strips emoji on non-UTF-8 stdout, falls back to ASCII."""
    out_enc = (sys.stdout.encoding or "").lower()
    if not out_enc.startswith("utf"):
        # Drop everything outside the Basic Multilingual Plane (emoji etc.)
        text = "".join(filter(lambda c: ord(c) < 65536, text))
    try:
        print(text)
    except UnicodeEncodeError:
        print("".join(filter(lambda c: ord(c) < 128, text)))
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION (UPDATED TO 192.168.1.50)
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
GRAPHQL_URL = "https://api.medevio.cz/graphql"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🕒 DATETIME FIXER
|
||||
# ==============================
|
||||
def fix_datetime(dt_str):
    """Parse an ISO timestamp into a naive datetime; None if empty/invalid.

    A trailing "Z" or "+00:00" marker is stripped first, so the result is a
    naive datetime carrying the original UTC wall-clock time.
    """
    if not dt_str:
        return None
    cleaned = dt_str.replace("Z", "").replace("+00:00", "")
    try:
        return datetime.fromisoformat(cleaned)
    except Exception:
        return None
|
||||
|
||||
|
||||
# Optional filter
|
||||
CREATED_AFTER = "2025-01-01"
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧮 HELPERS
|
||||
# ==============================
|
||||
def read_token(p: Path) -> str:
    """Load the bearer token from *p*, removing a leading "Bearer " if present."""
    content = p.read_text(encoding="utf-8").strip()
    if content.startswith("Bearer "):
        content = content.split(" ", 1)[1]
    return content
|
||||
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
|
||||
request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
|
||||
id
|
||||
displayTitle(locale: $locale)
|
||||
createdAt
|
||||
updatedAt
|
||||
userNote
|
||||
eventType
|
||||
extendedPatient(clinicSlug: $clinicSlug) {
|
||||
name
|
||||
surname
|
||||
identificationNumber
|
||||
}
|
||||
ecrfFilledData(locale: $locale) {
|
||||
name
|
||||
groups {
|
||||
label
|
||||
fields {
|
||||
name
|
||||
label
|
||||
type
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def fetch_questionnaire(headers, request_id, clinic_slug):
    """Fetch one request's detail (userNote + eCRF) via GetPatientRequest2.

    Returns the request dict from the GraphQL payload, or None when the
    HTTP status is not 200 (the error is logged, not raised).
    """
    variables = {
        "requestId": request_id,
        "clinicSlug": clinic_slug,
        "locale": "cs",
    }
    payload = {
        "operationName": "GetPatientRequest2",
        "query": GRAPHQL_QUERY,
        "variables": variables,
    }

    resp = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=40)
    if resp.status_code != 200:
        safe_print(f"❌ HTTP {resp.status_code} for {request_id}: {resp.text}")
        return None
    return resp.json().get("data", {}).get("request")
|
||||
|
||||
|
||||
def insert_questionnaire(cur, req):
    """Upsert one questionnaire row (userNote + serialized eCRF) via *cur*.

    No-op when *req* is falsy. The eCRF structure is stored as JSON text.
    """
    if not req:
        return

    patient = req.get("extendedPatient") or {}

    sql = """
        INSERT INTO medevio_questionnaires (
            request_id, created_at, updated_at, user_note, ecrf_json
        )
        VALUES (%s,%s,%s,%s,%s)
        ON DUPLICATE KEY UPDATE
            updated_at = VALUES(updated_at),
            user_note = VALUES(user_note),
            ecrf_json = VALUES(ecrf_json),
            updated_local = NOW()
    """
    row = (
        req.get("id"),
        fix_datetime(req.get("createdAt")),
        fix_datetime(req.get("updatedAt")),
        req.get("userNote"),
        json.dumps(req.get("ecrfFilledData"), ensure_ascii=False),
    )
    cur.execute(sql, row)

    safe_print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🧠 MAIN
|
||||
# ==============================
|
||||
def main():
    """Fetch and store questionnaires for requests not yet processed."""
    headers = {
        "Authorization": f"Bearer {read_token(TOKEN_PATH)}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    conn = pymysql.connect(**DB_CONFIG)

    # Select requests whose questionnaire is missing or stale.
    base_sql = """
        SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
        FROM pozadavky
        WHERE (questionnaireprocessed IS NULL OR questionnaireprocessed < updatedAt)
    """
    with conn.cursor() as cur:
        if CREATED_AFTER:
            cur.execute(base_sql + " AND createdAt >= %s", (CREATED_AFTER,))
        else:
            cur.execute(base_sql)
        rows = cur.fetchall()

    safe_print(f"📋 Found {len(rows)} requests needing questionnaire check.")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        safe_print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")

        req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
        if not req:
            safe_print(" ⚠️ No questionnaire data found.")
            continue

        with conn.cursor() as cur:
            insert_questionnaire(cur, req)
            cur.execute(
                "UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s",
                (req_id,)
            )
        conn.commit()

        time.sleep(0.6)

    conn.close()
    safe_print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")
|
||||
@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# UTF-8 SAFE OUTPUT
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# ==============================
|
||||
# CONFIG (.50)
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
GRAPHQL_QUERY_MESSAGES = r"""
|
||||
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
|
||||
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
|
||||
id createdAt updatedAt readAt text type
|
||||
sender { id name surname clinicId }
|
||||
medicalRecord { id description contentType url downloadUrl createdAt updatedAt }
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def parse_dt(s):
    """Parse an ISO-8601 timestamp (trailing "Z" = UTC) into a datetime.

    Returns None for empty or unparseable input. The result keeps its
    timezone offset when one is present in the string.
    """
    if not s:
        return None
    try:
        return datetime.fromisoformat(s.replace("Z", "+00:00"))
    except (ValueError, TypeError):
        # Was a bare `except:` on one line — narrow it and keep one statement per line.
        return None
|
||||
|
||||
def read_token(path: Path) -> str:
    """Read the API token, stripping only a *leading* "Bearer " prefix.

    The previous `.replace("Bearer ", "")` removed the substring anywhere in
    the token, which could silently corrupt tokens containing that sequence.
    This also matches the token readers in the sibling sync scripts.
    """
    tok = path.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        return tok.split(" ", 1)[1]
    return tok
|
||||
|
||||
def main():
    """Delta-sync messages (and their attachments) for stale requests.

    For each row in `pozadavky` whose messagesProcessed marker is missing or
    older than updatedAt, fetch messages updated since the marker, upsert
    them into `medevio_conversation`, download unseen attachments into
    `medevio_downloads`, and stamp messagesProcessed = NOW().

    Fix: `raise_for_status()` is now called before reading the JSON body.
    Previously an HTTP/auth error produced `messages == []`, the row was
    stamped as processed anyway, and those messages were silently lost.
    """
    token = read_token(TOKEN_PATH)
    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
    conn = pymysql.connect(**DB_CONFIG)

    # 1. Attachment ids already stored (duplicate prevention).
    with conn.cursor() as cur:
        cur.execute("SELECT attachment_id FROM medevio_downloads")
        existing_ids = {r["attachment_id"] for r in cur.fetchall()}

    # 2. Requests that need a message sync.
    with conn.cursor() as cur:
        cur.execute("""
            SELECT id, messagesProcessed FROM pozadavky
            WHERE messagesProcessed IS NULL OR messagesProcessed < updatedAt
        """)
        rows = cur.fetchall()

    print(f"📋 Počet požadavků k synchronizaci zpráv: {len(rows)}")

    for i, row in enumerate(rows, 1):
        req_id = row["id"]
        updated_since = row["messagesProcessed"]
        if updated_since:
            # NOTE(review): the DB value is naive local time but is sent with a
            # "Z" (UTC) suffix — confirm the server's expected timezone.
            updated_since = updated_since.replace(microsecond=0).isoformat() + "Z"

        print(f"[{i}/{len(rows)}] Synchronizuji: {req_id}")

        payload = {
            "operationName": "UseMessages_ListMessages",
            "query": GRAPHQL_QUERY_MESSAGES,
            "variables": {"requestId": req_id, "updatedSince": updated_since}
        }

        try:
            r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
            r.raise_for_status()  # fail loudly instead of marking the row processed
            messages = r.json().get("data", {}).get("messages", []) or []

            if messages:
                with conn.cursor() as cur:
                    for msg in messages:
                        # Upsert the message itself.
                        sender = msg.get("sender") or {}
                        sender_name = " ".join(filter(None, [sender.get("name"), sender.get("surname")]))
                        mr = msg.get("medicalRecord") or {}

                        cur.execute("""
                            INSERT INTO medevio_conversation (
                                id, request_id, sender_name, sender_id, sender_clinic_id,
                                text, created_at, read_at, updated_at,
                                attachment_url, attachment_description, attachment_content_type
                            ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
                            ON DUPLICATE KEY UPDATE
                                text = VALUES(text), updated_at = VALUES(updated_at), read_at = VALUES(read_at)
                        """, (
                            msg.get("id"), req_id, sender_name, sender.get("id"), sender.get("clinicId"),
                            msg.get("text"), parse_dt(msg.get("createdAt")), parse_dt(msg.get("readAt")),
                            parse_dt(msg.get("updatedAt")), mr.get("downloadUrl") or mr.get("url"),
                            mr.get("description"), mr.get("contentType")
                        ))

                        # Store the attachment if present and not yet downloaded.
                        attachment_id = mr.get("id")
                        if attachment_id and attachment_id not in existing_ids:
                            url = mr.get("downloadUrl") or mr.get("url")
                            if url:
                                att_r = requests.get(url, timeout=30)
                                if att_r.status_code == 200:
                                    cur.execute("""
                                        INSERT INTO medevio_downloads (
                                            request_id, attachment_id, attachment_type,
                                            filename, content_type, file_size, created_at, file_content
                                        ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                                    """, (
                                        req_id, attachment_id, "MESSAGE_ATTACHMENT",
                                        url.split("/")[-1].split("?")[0], mr.get("contentType"),
                                        len(att_r.content), parse_dt(msg.get("createdAt")), att_r.content
                                    ))
                                    existing_ids.add(attachment_id)

                    cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
                conn.commit()
            else:
                # Nothing new — just advance the marker.
                with conn.cursor() as cur:
                    cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
                conn.commit()

            time.sleep(0.3)
        except Exception as e:
            print(f" ❌ Chyba u {req_id}: {e}")

    conn.close()
    print("\n🎉 Delta sync zpráv a příloh DOKONČEN")
|
||||
@@ -0,0 +1,177 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Download all attachments for pozadavky where attachmentsProcessed IS NULL
|
||||
Store them in MySQL table `medevio_downloads` on 192.168.1.50.
|
||||
"""
|
||||
|
||||
import zlib
|
||||
import json
|
||||
import requests
|
||||
import pymysql
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT
|
||||
# ==============================
|
||||
def safe_print(text: str):
    """Print *text* safely on consoles that cannot encode UTF-8.

    Strips non-BMP characters (emoji) when stdout is not UTF-8, and falls
    back to pure ASCII if printing still fails.
    """
    enc = (sys.stdout.encoding or "").lower()
    if not enc.startswith("utf"):
        text = "".join(ch for ch in text if ord(ch) <= 0xFFFF)
    try:
        print(text)
    except UnicodeEncodeError:
        ascii_only = "".join(ch for ch in text if ord(ch) <= 0x7F)
        print(ascii_only)
|
||||
|
||||
|
||||
# ==============================
|
||||
# 🔧 CONFIGURATION (.50)
|
||||
# ==============================
|
||||
TOKEN_PATH = Path(__file__).resolve().parent.parent / "token.txt"
|
||||
CLINIC_SLUG = "mudr-buzalkova"
|
||||
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
"cursorclass": pymysql.cursors.DictCursor,
|
||||
}
|
||||
|
||||
CREATED_AFTER = "2024-12-01"
|
||||
|
||||
GRAPHQL_QUERY = r"""
|
||||
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
|
||||
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
|
||||
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
|
||||
patientRequestId: $requestId
|
||||
pageInfo: {first: 100, offset: 0}
|
||||
) {
|
||||
attachmentType
|
||||
id
|
||||
medicalRecord {
|
||||
contentType
|
||||
description
|
||||
downloadUrl
|
||||
id
|
||||
url
|
||||
visibleToPatient
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def extract_filename_from_url(url: str) -> str:
    """Return the last path segment of *url* without its query string.

    Falls back to "unknown_filename" for non-string input.
    """
    try:
        return url.split("/")[-1].split("?")[0]
    except AttributeError:
        # Was a bare `except:` — only a non-str (e.g. None) can fail here.
        return "unknown_filename"
|
||||
|
||||
|
||||
def read_token(p: Path) -> str:
    """Read the token file at *p*; a leading "Bearer " prefix is discarded."""
    tok = p.read_text(encoding="utf-8").strip()
    if tok.startswith("Bearer "):
        return tok.split(" ", 1)[1]
    return tok
|
||||
|
||||
|
||||
def main():
    """Download every attachment for requests with attachmentsProcessed IS NULL.

    Attachments go into `medevio_downloads` (blob column); each request is
    then stamped attachmentsProcessed = NOW() even when it had none.

    Fixes:
    - the success log printed a literal "(unknown)" instead of the filename;
    - `raise_for_status()` is called before reading the JSON body, so an
      HTTP error no longer looks like "no attachments" and the row is not
      falsely stamped as processed.
    """
    token = read_token(TOKEN_PATH)
    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
    conn = pymysql.connect(**DB_CONFIG)

    # 1. Ids of attachments already stored (duplicate prevention).
    with conn.cursor() as cur:
        cur.execute("SELECT attachment_id FROM medevio_downloads")
        existing_ids = {row["attachment_id"] for row in cur.fetchall()}

    safe_print(f"✅ V databázi již máme {len(existing_ids)} příloh.")

    # 2. Requests still waiting for attachment processing.
    sql = "SELECT id, pacient_prijmeni, pacient_jmeno, createdAt FROM pozadavky WHERE attachmentsProcessed IS NULL"
    params = []
    if CREATED_AFTER:
        sql += " AND createdAt >= %s"
        params.append(CREATED_AFTER)

    with conn.cursor() as cur:
        cur.execute(sql, params)
        req_rows = cur.fetchall()

    safe_print(f"📋 Počet požadavků ke stažení příloh: {len(req_rows)}")

    for i, row in enumerate(req_rows, 1):
        req_id = row["id"]
        prijmeni = row.get("pacient_prijmeni") or "Neznamy"
        created_date = row.get("createdAt") or datetime.now()

        safe_print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni} ({req_id})")

        payload = {
            "operationName": "ClinicRequestDetail_GetPatientRequest2",
            "query": GRAPHQL_QUERY,
            "variables": {"requestId": req_id},
        }

        try:
            r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
            r.raise_for_status()  # don't mistake an error page for "no attachments"
            attachments = r.json().get("data", {}).get("patientRequestMedicalRecords", [])

            if attachments:
                with conn.cursor() as cur:
                    for a in attachments:
                        m = a.get("medicalRecord") or {}
                        att_id = a.get("id")

                        if att_id in existing_ids:
                            continue

                        url = m.get("downloadUrl")
                        if not url:
                            continue
                        att_r = requests.get(url, timeout=30)
                        if att_r.status_code != 200:
                            continue

                        content = att_r.content
                        filename = extract_filename_from_url(url)

                        cur.execute("""
                            INSERT INTO medevio_downloads (
                                request_id, attachment_id, attachment_type,
                                filename, content_type, file_size,
                                created_at, file_content
                            ) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
                        """, (req_id, att_id, a.get("attachmentType"), filename,
                              m.get("contentType"), len(content), created_date, content))
                        existing_ids.add(att_id)
                        # FIX: log the real filename (previously a literal "(unknown)")
                        safe_print(f" 💾 Uloženo: {filename} ({len(content) / 1024:.1f} kB)")

                conn.commit()

            # Mark as processed even when there were no attachments at all.
            with conn.cursor() as cur:
                cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
            conn.commit()

            time.sleep(0.3)
        except Exception as e:
            print(f" ❌ Chyba u {req_id}: {e}")

    conn.close()
    safe_print("\n🎯 Všechny přílohy byly zpracovány.")
|
||||
@@ -0,0 +1,232 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from collections import defaultdict
|
||||
import time
|
||||
import sys
|
||||
|
||||
# Force UTF-8 output even under Windows Task Scheduler
|
||||
import sys
|
||||
try:
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
sys.stderr.reconfigure(encoding='utf-8')
|
||||
except AttributeError:
|
||||
# Python < 3.7 fallback (not needed for you, but safe)
|
||||
import io
|
||||
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
|
||||
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
|
||||
|
||||
# ==============================
|
||||
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
|
||||
# ==============================
|
||||
def safe_print(text: str = ""):
|
||||
enc = sys.stdout.encoding or ""
|
||||
if not enc.lower().startswith("utf"):
|
||||
# Strip emoji and characters outside BMP for Task Scheduler
|
||||
text = ''.join(ch for ch in text if ord(ch) < 65536)
|
||||
try:
|
||||
print(text)
|
||||
except UnicodeEncodeError:
|
||||
# ASCII fallback
|
||||
text = ''.join(ch for ch in text if ord(ch) < 128)
|
||||
print(text)
|
||||
|
||||
|
||||
# ==============================
|
||||
# ⚙️ CONFIGURATION
|
||||
# ==============================
|
||||
DB_CONFIG = {
|
||||
"host": "192.168.1.76",
|
||||
"port": 3306,
|
||||
"user": "root",
|
||||
"password": "Vlado9674+",
|
||||
"database": "medevio",
|
||||
"charset": "utf8mb4",
|
||||
}
|
||||
|
||||
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
|
||||
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Return *name* with characters illegal in Windows filenames replaced by '_'.

    Leading/trailing whitespace is stripped from the result.
    """
    cleaned = re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name)
    return cleaned.strip()
|
||||
|
||||
|
||||
def make_abbrev(title: str) -> str:
    """Abbreviate *title*: first letter of each word, digits kept whole, uppercased.

    Words are alphanumeric runs (Czech letters included); empty/None titles
    yield "".
    """
    if not title:
        return ""
    tokens = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    pieces = [tok if tok.isdigit() else tok[0] for tok in tokens]
    return "".join(pieces).upper()
|
||||
|
||||
|
||||
# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
    """Delete files in *folder* whose sanitized names are not in *valid_files*.

    Files flagged with a leading ▲ are always preserved. A missing folder is
    a no-op; deletion failures are reported via safe_print, never raised.
    """
    if not folder.exists():
        return

    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        if entry.name.startswith("▲"):
            # ▲-flagged files are deliberately kept; skip them untouched.
            continue
        if sanitize_name(entry.name) in valid_files:
            continue
        safe_print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            safe_print(f"⚠️ Could not delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)

cur_meta = conn.cursor(pymysql.cursors.DictCursor)  # metadata rows as dicts
cur_blob = conn.cursor()  # plain cursor reused for per-request BLOB fetches

safe_print("🔍 Loading metadata from DB (FAST)…")

# Metadata only (no file_content BLOBs) for requests updated in the last
# 14 days; the BLOBs themselves are fetched later, per request, via cur_blob.
# NOTE(review): conn/cursors are only closed at the very end of the script —
# an exception mid-run leaks the connection; consider try/finally.
cur_meta.execute("""
    SELECT d.id AS download_id,
           d.request_id,
           d.filename,
           d.created_at,
           p.updatedAt AS req_updated_at,
           p.pacient_jmeno AS jmeno,
           p.pacient_prijmeni AS prijmeni,
           p.displayTitle
    FROM medevio_downloads d
    JOIN pozadavky p ON d.request_id = p.id
    WHERE p.updatedAt >= DATE_SUB(NOW(), INTERVAL 14 DAY)
    ORDER BY p.updatedAt DESC
""")

rows = cur_meta.fetchall()
safe_print(f"📋 Found {len(rows)} attachment records.\n")
|
||||
|
||||
# ==============================
# 🧠 MAIN LOOP WITH PROGRESS
# ==============================

# Group rows by request_id in Python — avoids N extra SELECT filename queries
rows_by_request = defaultdict(list)
for r in rows:
    rows_by_request[r["request_id"]].append(r)

total_requests = len(rows_by_request)
safe_print(f"🔄 Processing {total_requests} unique requests...\n")

# Pre-index BASE_DIR once — avoids iterdir() called twice per request
# NOTE(review): this snapshot is taken once; folders renamed or removed by the
# loop below (▲-flag rename, duplicate merge) are not reflected in it.
folder_list = [(f, f.name) for f in BASE_DIR.iterdir() if f.is_dir()]
|
||||
|
||||
for current_index, (req_id, req_rows) in enumerate(rows_by_request.items(), 1):
    percent = (current_index / total_requests) * 100
    safe_print(f"\n[ {percent:5.1f}% ] Processing request {current_index} / {total_requests} → {req_id}")

    # ========== VALID FILENAMES from already-loaded rows ==========
    # sanitized filename → original filename (the original is needed for the
    # DB BLOB query further down)
    file_map = {sanitize_name(r["filename"]): r["filename"] for r in req_rows}
    valid_files = set(file_map.keys())

    # ========== BUILD FOLDER NAME ==========
    # Any row of the request carries the patient/request metadata.
    r = req_rows[0]
    updated_at = r["req_updated_at"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")

    prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
    jmeno = sanitize_name(r["jmeno"] or "")
    title = r.get("displayTitle") or ""
    abbr = make_abbrev(title)

    clean_folder_name = sanitize_name(
        f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}"
    )

    # ========== DETECT EXISTING FOLDER from pre-built index ==========
    # NOTE(review): if several folders contain the id, the first match wins
    # arbitrarily; the rest are merged into it below.
    req_id_str = str(req_id)
    matching = [f for f, name in folder_list if req_id_str in name]
    existing_folder = matching[0] if matching else None

    main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name

    # ========== MERGE DUPLICATES ==========
    possible_dups = [f for f, name in folder_list if req_id_str in name and f != main_folder]

    for dup in possible_dups:
        safe_print(f"♻️ Merging duplicate folder: {dup.name}")

        # Drop stale files first so only expected ones get moved over.
        clean_folder(dup, valid_files)
        main_folder.mkdir(parents=True, exist_ok=True)

        for f in dup.iterdir():
            if f.is_file():
                target = main_folder / f.name
                # Keep the copy already in main_folder; skip name clashes.
                if not target.exists():
                    f.rename(target)

        shutil.rmtree(dup, ignore_errors=True)

    # ========== CLEAN MAIN FOLDER ==========
    clean_folder(main_folder, valid_files)

    # ========== DOWNLOAD MISSING FILES (batch blob fetch per request) ==========
    main_folder.mkdir(parents=True, exist_ok=True)
    added_new_file = False

    # Missing = neither the plain name nor its ▲-flagged variant exists.
    missing_san = [
        fn for fn in valid_files
        if not (main_folder / fn).exists() and not (main_folder / ("▲" + fn)).exists()
    ]

    if missing_san:
        # Fetch all missing blobs in a single query instead of one per file
        missing_orig = [file_map[fn] for fn in missing_san]
        placeholders = ",".join(["%s"] * len(missing_orig))
        cur_blob.execute(
            f"SELECT filename, file_content FROM medevio_downloads "
            f"WHERE request_id=%s AND filename IN ({placeholders})",
            [req_id] + missing_orig,
        )
        for blob_filename, content in cur_blob.fetchall():
            if not content:
                # NULL/empty BLOB — nothing to write.
                continue
            dest_plain = main_folder / sanitize_name(blob_filename)
            with open(dest_plain, "wb") as fh:
                fh.write(content)
            safe_print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
            added_new_file = True

    # ========== REMOVE ▲ FLAG IF NEW FILES ADDED ==========
    # New content re-activates a previously ▲-flagged (processed) folder.
    if added_new_file and "▲" in main_folder.name:
        new_name = main_folder.name.replace("▲", "").strip()
        new_path = main_folder.parent / new_name

        if new_path != main_folder:
            try:
                main_folder.rename(new_path)
                safe_print(f"🔄 Folder flag ▲ removed → {new_name}")
                main_folder = new_path
            except Exception as e:
                safe_print(f"⚠️ Could not rename folder: {e}")

safe_print("\n🎯 Export complete.\n")

cur_blob.close()
cur_meta.close()
conn.close()
|
||||
@@ -0,0 +1,146 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import pymysql
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# ==============================
# ⚙️ CONFIGURATION
# ==============================
# NOTE(review): plaintext root DB credentials are committed here — consider
# moving them to an environment variable or a config file kept out of VCS.
DB_CONFIG = {
    "host": "192.168.1.76",
    "port": 3306,
    "user": "root",
    "password": "Vlado9674+",
    "database": "medevio",
    "charset": "utf8mb4",
}

# Destination root for exported attachments; created on import if missing.
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def sanitize_name(name: str) -> str:
    """Replace characters invalid in Windows filenames with '_' and strip whitespace."""
    forbidden = r'[<>:"/\\|?*\x00-\x1F]'
    return re.sub(forbidden, "_", name).strip()
|
||||
|
||||
|
||||
def clean_folder(folder: Path, valid_files: set):
    """Remove files that do NOT exist in MySQL for this request.

    A missing folder is a no-op; deletion failures are reported, not raised.
    """
    if not folder.exists():
        return

    unexpected = (
        entry for entry in folder.iterdir()
        if entry.is_file() and sanitize_name(entry.name) not in valid_files
    )
    for entry in unexpected:
        print(f"🗑️ Removing unexpected file: {entry.name}")
        try:
            entry.unlink()
        except Exception as e:
            print(f"⚠️ Cannot delete {entry}: {e}")
|
||||
|
||||
|
||||
# ==============================
# 📥 LOAD EVERYTHING IN ONE QUERY
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor(pymysql.cursors.DictCursor)

print("📥 Loading ALL metadata + BLOBs with ONE MySQL query…")

# NOTE(review): this pulls every file_content BLOB for every request into
# memory at once — fine for a small archive, but peak memory grows with the
# table; the sibling script's per-request batch fetch scales better.
cur.execute("""
    SELECT
        d.id AS download_id,
        d.request_id,
        d.filename,
        d.file_content,
        p.updatedAt AS req_updated_at,
        p.pacient_jmeno AS jmeno,
        p.pacient_prijmeni AS prijmeni
    FROM medevio_downloads d
    JOIN pozadavky p ON d.request_id = p.id
    ORDER BY p.updatedAt DESC, d.created_at ASC
""")

rows = cur.fetchall()
print(f"📦 Loaded {len(rows)} total file rows.\n")

# Everything is fetched; the connection is no longer needed.
conn.close()
|
||||
|
||||
# ==============================
# 🔄 ORGANIZE ROWS PER REQUEST
# ==============================
requests = {}  # req_id → list of file dicts

# Bucket each fetched row under its request id (order within a bucket is
# preserved from the SQL ORDER BY).
for row in rows:
    requests.setdefault(row["request_id"], []).append(row)

print(f"📌 Unique requests: {len(requests)}\n")
|
||||
|
||||
# ==============================
# 🧠 MAIN LOOP – SAME LOGIC AS BEFORE
# ==============================
for req_id, filelist in requests.items():

    # ========== GET UPDATEDAT (same logic) ==========
    # Any row of the request carries the patient/request metadata.
    any_row = filelist[0]
    updated_at = any_row["req_updated_at"] or datetime.now()
    date_str = updated_at.strftime("%Y-%m-%d")

    prijmeni = sanitize_name(any_row["prijmeni"] or "Unknown")
    jmeno = sanitize_name(any_row["jmeno"] or "")

    folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} {req_id}")
    main_folder = BASE_DIR / folder_name

    # ========== VALID FILES ==========
    valid_files = {sanitize_name(r["filename"]) for r in filelist}

    # ========== FIND OLD FOLDERS ==========
    # FIX: coerce the id to str before the substring test — `req_id in f.name`
    # raises TypeError when request_id is numeric (the sibling script already
    # guards with str(req_id)). Backward compatible for string ids.
    req_id_str = str(req_id)
    possible_dups = [
        f for f in BASE_DIR.iterdir()
        if f.is_dir() and req_id_str in f.name and f != main_folder
    ]

    # ========== MERGE OLD FOLDERS ==========
    for dup in possible_dups:
        print(f"♻️ Merging folder: {dup.name}")

        # Drop stale files first so only expected ones get moved over.
        clean_folder(dup, valid_files)
        main_folder.mkdir(parents=True, exist_ok=True)

        for f in dup.iterdir():
            if f.is_file():
                target = main_folder / f.name
                # Keep the copy already in main_folder; skip name clashes.
                if not target.exists():
                    f.rename(target)

        shutil.rmtree(dup, ignore_errors=True)

    # ========== CLEAN MAIN FOLDER ==========
    main_folder.mkdir(parents=True, exist_ok=True)
    clean_folder(main_folder, valid_files)

    # ========== SAVE FILES (fast now) ==========
    for r in filelist:
        filename = sanitize_name(r["filename"])
        dest = main_folder / filename

        if dest.exists():
            # Already exported on a previous run.
            continue

        content = r["file_content"]
        if not content:
            # NULL/empty BLOB — nothing to write.
            continue

        with open(dest, "wb") as f:
            f.write(content)

        print(f"💾 Saved: {dest.relative_to(BASE_DIR)}")

print("\n🎯 Export complete.\n")
|
||||
Reference in New Issue
Block a user