Compare commits

...

9 Commits

Author SHA1 Message Date
7c08ad8e35 Z230 2026-01-29 11:34:52 +01:00
30d64680be notebook 2026-01-28 07:26:08 +01:00
55e723788b notebook 2026-01-16 17:06:54 +01:00
2d2a60a845 Z230 2026-01-16 15:34:12 +01:00
186c98fd0d Z230 2026-01-07 08:30:39 +01:00
d1bfe92e28 Z230 2025-12-14 21:30:10 +01:00
21c11d2336 Remove PyCharm config from repo and add proper .gitignore 2025-12-14 21:29:23 +01:00
a95f8ae1f9 Z230 2025-12-14 21:23:43 +01:00
139e867a6d Z230 2025-12-14 21:22:42 +01:00
25 changed files with 2436 additions and 96 deletions

13
.gitignore vendored Normal file
View File

@@ -0,0 +1,13 @@
# Virtual environment
.venv/
# Python
__pycache__/
*.pyc
# PyCharm / IDE
.idea/
# OS
.DS_Store
Thumbs.db

3
.idea/.gitignore generated vendored
View File

@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml

10
.idea/Medevio.iml generated
View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" />
</content>
<orderEntry type="jdk" jdkName="Python 3.13 (Medevio)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -1,6 +0,0 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

7
.idea/misc.xml generated
View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 (Medevio)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (Medevio)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated
View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/Medevio.iml" filepath="$PROJECT_DIR$/.idea/Medevio.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml generated
View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

View File

@@ -0,0 +1,293 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Delta sync of Medevio communication.
Downloads only messages changed after messagesProcessed for each request.
"""
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
import sys
# ==============================
# UTF-8 SAFE OUTPUT
# ==============================
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# CONFIG
# ==============================
TOKEN_PATH = Path("token.txt")
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY_MESSAGES = r"""
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
messages: listMessages(
patientRequestId: $requestId,
updatedSince: $updatedSince
) {
id
createdAt
updatedAt
readAt
text
type
sender {
id
name
surname
clinicId
}
medicalRecord {
id
description
contentType
url
downloadUrl
createdAt
updatedAt
}
}
}
"""
# ==============================
# HELPERS
# ==============================
def parse_dt(s):
if not s:
return None
try:
return datetime.fromisoformat(s.replace("Z", "+00:00"))
except Exception:
return None
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
return tok.replace("Bearer ", "")
# ==============================
# FETCH MESSAGES (DELTA)
# ==============================
def fetch_messages(headers, request_id, updated_since):
payload = {
"operationName": "UseMessages_ListMessages",
"query": GRAPHQL_QUERY_MESSAGES,
"variables": {
"requestId": request_id,
"updatedSince": updated_since,
},
}
r = requests.post(
"https://api.medevio.cz/graphql",
json=payload,
headers=headers,
timeout=30
)
if r.status_code != 200:
safe_print(f"❌ HTTP {r.status_code} for request {request_id}")
return []
j = r.json()
if "errors" in j:
safe_print(f"❌ GraphQL error for {request_id}: {j['errors']}")
return []
return j.get("data", {}).get("messages", []) or []
# ==============================
# INSERT MESSAGE
# ==============================
def insert_message(cur, req_id, msg):
sender = msg.get("sender") or {}
sender_name = " ".join(
x for x in [sender.get("name"), sender.get("surname")] if x
) or None
mr = msg.get("medicalRecord") or {}
sql = """
INSERT INTO medevio_conversation (
id, request_id,
sender_name, sender_id, sender_clinic_id,
text, created_at, read_at, updated_at,
attachment_url, attachment_description, attachment_content_type
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
sender_name = VALUES(sender_name),
sender_id = VALUES(sender_id),
sender_clinic_id = VALUES(sender_clinic_id),
text = VALUES(text),
created_at = VALUES(created_at),
read_at = VALUES(read_at),
updated_at = VALUES(updated_at),
attachment_url = VALUES(attachment_url),
attachment_description = VALUES(attachment_description),
attachment_content_type = VALUES(attachment_content_type)
"""
cur.execute(sql, (
msg.get("id"),
req_id,
sender_name,
sender.get("id"),
sender.get("clinicId"),
msg.get("text"),
parse_dt(msg.get("createdAt")),
parse_dt(msg.get("readAt")),
parse_dt(msg.get("updatedAt")),
mr.get("downloadUrl") or mr.get("url"),
mr.get("description"),
mr.get("contentType")
))
# ==============================
# INSERT ATTACHMENT (DEDUP)
# ==============================
def insert_download(cur, req_id, msg, existing_ids):
mr = msg.get("medicalRecord") or {}
attachment_id = mr.get("id")
if not attachment_id or attachment_id in existing_ids:
return
url = mr.get("downloadUrl") or mr.get("url")
if not url:
return
try:
r = requests.get(url, timeout=30)
r.raise_for_status()
data = r.content
except Exception as e:
safe_print(f"⚠️ Attachment download failed: {e}")
return
filename = url.split("/")[-1].split("?")[0]
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
filename, content_type, file_size, created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
file_content = VALUES(file_content),
file_size = VALUES(file_size),
downloaded_at = NOW()
""", (
req_id,
attachment_id,
"MESSAGE_ATTACHMENT",
filename,
mr.get("contentType"),
len(data),
parse_dt(msg.get("createdAt")),
data
))
existing_ids.add(attachment_id)
# ==============================
# MAIN
# ==============================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
# existing attachments
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {r["attachment_id"] for r in cur.fetchall()}
# select requests needing sync
with conn.cursor() as cur:
cur.execute("""
SELECT id, messagesProcessed
FROM pozadavky
WHERE messagesProcessed IS NULL
OR messagesProcessed < updatedAt
""")
rows = cur.fetchall()
safe_print(f"📋 Found {len(rows)} requests for message delta-sync\n")
for i, row in enumerate(rows, 1):
req_id = row["id"]
updated_since = row["messagesProcessed"]
if updated_since:
updated_since = updated_since.replace(microsecond=0).isoformat() + "Z"
safe_print(f"[{i}/{len(rows)}] {req_id}")
messages = fetch_messages(headers, req_id, updated_since)
if not messages:
safe_print(" ⏭ No new messages")
else:
with conn.cursor() as cur:
for msg in messages:
insert_message(cur, req_id, msg)
insert_download(cur, req_id, msg, existing_ids)
conn.commit()
safe_print(f"{len(messages)} new/updated messages")
with conn.cursor() as cur:
cur.execute(
"UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s",
(req_id,)
)
conn.commit()
time.sleep(0.25)
conn.close()
safe_print("\n🎉 Delta message sync DONE")
# ==============================
if __name__ == "__main__":
main()

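Note: insert_message() above relies on ON DUPLICATE KEY UPDATE, so medevio_conversation.id must be a primary or unique key. A minimal sketch of a compatible table follows; the column names come from the INSERT, but the types, sizes and secondary index are assumptions, not taken from the actual database.

# Hypothetical DDL sketch; only the column names come from insert_message()
# above, the types, sizes and secondary index are assumptions.
MEDEVIO_CONVERSATION_DDL = """
CREATE TABLE IF NOT EXISTS medevio_conversation (
    id                      VARCHAR(64)  NOT NULL PRIMARY KEY,
    request_id              VARCHAR(64)  NOT NULL,
    sender_name             VARCHAR(255) NULL,
    sender_id               VARCHAR(64)  NULL,
    sender_clinic_id        VARCHAR(64)  NULL,
    text                    MEDIUMTEXT   NULL,
    created_at              DATETIME     NULL,
    read_at                 DATETIME     NULL,
    updated_at              DATETIME     NULL,
    attachment_url          TEXT         NULL,
    attachment_description  TEXT         NULL,
    attachment_content_type VARCHAR(255) NULL,
    KEY idx_request_id (request_id)
) CHARACTER SET utf8mb4;
"""

if __name__ == "__main__":
    # Execute with an existing pymysql cursor if the table is missing:
    # cur.execute(MEDEVIO_CONVERSATION_DDL)
    print(MEDEVIO_CONVERSATION_DDL)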
View File

@@ -49,7 +49,7 @@ DB_CONFIG = {
     "charset": "utf8mb4",
 }
-BASE_DIR = Path(r"z:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
+BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
 BASE_DIR.mkdir(parents=True, exist_ok=True)

View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
import time
import sys
# ================================
# UTF-8 SAFE OUTPUT (Windows friendly)
# ================================
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ================================
# 🔧 CONFIG
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500
STATES = ["ACTIVE", "DONE"]  # must be listed explicitly, otherwise the API returns only ACTIVE
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# DATETIME PARSER
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str)
if dt.tzinfo is None:
dt = dt.replace(tzinfo=datetime.now().astimezone().tzinfo)
return dt.astimezone().strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
# ================================
# UPSERT
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
api_updated = to_mysql_dt(r.get("updatedAt"))
msg_updated = to_mysql_dt((r.get("lastMessage") or {}).get("createdAt"))
final_updated = max(filter(None, [api_updated, msg_updated]), default=None)
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
final_updated,
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# FETCH PAGE (per state)
# ================================
def fetch_state(headers, state, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"state": state,
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
}
payload = {
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", []), data.get("count", 0)
# ================================
# MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
safe_print(f"\n=== FULL Medevio READ-ALL sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
grand_total = 0
for state in STATES:
safe_print(f"\n🔁 STATE = {state}")
offset = 0
total = None
processed = 0
while True:
batch, count = fetch_state(headers, state, offset)
if total is None:
total = count
safe_print(f"📡 {state}: celkem {total}")
if not batch:
break
for r in batch:
upsert(conn, r)
processed += len(batch)
safe_print(f"{processed}/{total}")
offset += BATCH_SIZE
if offset >= count:
break
time.sleep(0.4)
grand_total += processed
conn.close()
safe_print(f"\n✅ HOTOVO celkem zpracováno {grand_total} požadavků\n")
# ================================
if __name__ == "__main__":
main()

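The upsert above writes nine columns of pozadavky, and the other scripts in this diff also read or stamp messagesProcessed, questionnaireprocessed, attachmentsProcessed and noteSyncedAt. A hedged sketch of the column set these scripts assume; only the column names come from the code, all types and sizes are guesses.

# Hypothetical sketch of the pozadavky columns used across this diff; only the
# column names come from the scripts, all types and sizes are assumptions.
POZADAVKY_DDL = """
CREATE TABLE IF NOT EXISTS pozadavky (
    id                     VARCHAR(64)  NOT NULL PRIMARY KEY,
    displayTitle           VARCHAR(512) NULL,
    createdAt              DATETIME     NULL,
    updatedAt              DATETIME     NULL,
    doneAt                 DATETIME     NULL,
    removedAt              DATETIME     NULL,
    pacient_jmeno          VARCHAR(128) NULL,
    pacient_prijmeni       VARCHAR(128) NULL,
    pacient_rodnecislo     VARCHAR(32)  NULL,
    messagesProcessed      DATETIME     NULL,
    questionnaireprocessed DATETIME     NULL,
    attachmentsProcessed   DATETIME     NULL,
    noteSyncedAt           DATETIME     NULL
) CHARACTER SET utf8mb4;
"""

if __name__ == "__main__":
    print(POZADAVKY_DDL)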
View File

@@ -21,7 +21,7 @@ import argparse
 # ==============================
 # 🔧 CONFIGURATION
 # ==============================
-TOKEN_PATH = Path("token.txt")
+TOKEN_PATH = Path("../10ReadPozadavky/token.txt")
 DB_CONFIG = {
     "host": "192.168.1.76",

View File

@@ -0,0 +1 @@
{"cookies": [{"name": "gateway-access-token", "value": "YwBgkf8McREDKs7vCZj0EZD2fJsuV8RyDPtYx7WiDoz0nFJ9kxId8kcNEPBLFSwM+Tiz80+SOdFwo+oj", "domain": "my.medevio.cz", "path": "/", "expires": 1763372319, "httpOnly": false, "secure": false, "sameSite": "Lax"}, {"name": "aws-waf-token", "value": "b6a1d4eb-4350-40e5-8e52-1f5f9600fbb8:CgoAr9pC8c6zAAAA:OYwXLY5OyitSQPl5v2oIlS+hIxsrb5LxV4VjCyE2gJCFFE5PQu+0Zbxse2ZIofrNv5QKs0TYUDTmxPhZyTr9Qtjnq2gsVQxWHXzrbebv3Z7RbzB63u6Ymn3Fo8IbDev3CfCNcNuxCKltFEXLqSCjI2vqNY+7HZkgQBIqy2wMgzli3aSLq0w8lWYtZzyyot7q8RPXWMGTfaBUo2reY0SOSffm9rAivE9PszNfPid71CvNrGAAoxRbwb25eVujlyIcDVWe5vZ9Iw==", "domain": ".my.medevio.cz", "path": "/", "expires": 1761125920, "httpOnly": false, "secure": true, "sameSite": "Lax"}], "origins": [{"origin": "https://my.medevio.cz", "localStorage": [{"name": "awswaf_token_refresh_timestamp", "value": "1760780309860"}, {"name": "awswaf_session_storage", "value": "b6a1d4eb-4350-40e5-8e52-1f5f9600fbb8:CgoAr9pC8c+zAAAA:+vw//1NzmePjPpbGCJzUB+orCRivtJd098DbDX4AnABiGRw/+ql6ShqvFY4YdCY7w2tegb5mEPBdAmc4sNi22kNR9BuEoAgCUiMhkU1AZWfzM51zPfTh7SveCrREZ7xdvxcqKPMmfVLRYX5E4+UWh22z/LKQ7+d9VERp3J+wWCUW3dFFirkezy3N7b2FVjTlY/RxsZwhejQziTG/L3CkIFFP3mOReNgBvDpj7aKoM1knY4IL4TZ8E7zNv3nTsvzACLYvnUutVOUcofN1TfOzwZshSKsEXsMzrQn8PzLccX1jM5VSzce7gfEzl0zSPsT8NB3Sna+rhMIttDNYgvbW1HsfG2LIeKMR27Zf8hkslDRVVkcU/Kp2jLOEdhhrBKGjKY2o9/uX3NExdzh5MEKQSSRtmue01BpWYILPH23rMsz4YSmF+Ough5OeQoC95rkcYwVXMhwvUN9Zfp9UZ4xCNfFUex5dOrg9aJntYRnaceeocGUttNI5AdT0i3+osV6XHXzKxeqO8zLCS9BIsCzxaHfdqqem5DorMceuGKz+QqksatIQAA=="}, {"name": "Application.Intl.locale", "value": "cs"}, {"name": "Password.prefill", "value": "{\"username\":\"vladimir.buzalka@buzalka.cz\",\"type\":\"email\"}"}]}]}

View File

@@ -0,0 +1 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime, timezone
import time
from dateutil import parser
import sys
# Force UTF-8 output
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ================================
# 🔧 CONFIGURATION
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 100
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ⭐ NEW, TESTED QUERY: includes lastMessage.createdAt
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# 🧿 SAFE DATETIME PARSER (ALWAYS UTC → LOCAL)
# ================================
def to_mysql_dt_utc(iso_str):
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str)
if dt.tzinfo is None:
dt = dt.replace(tzinfo=timezone.utc)
dt_local = dt.astimezone()
return dt_local.strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
# ================================
# 🔑 TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# 💾 UPSERT
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
api_updated = to_mysql_dt_utc(r.get("updatedAt"))
last_msg = r.get("lastMessage") or {}
msg_updated = to_mysql_dt_utc(last_msg.get("createdAt"))
def max_dt(a, b):
if a and b:
return max(a, b)
return a or b
final_updated = max_dt(api_updated, msg_updated)
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt_utc(r.get("createdAt")),
final_updated,
to_mysql_dt_utc(r.get("doneAt")),
to_mysql_dt_utc(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# 📡 FETCH ACTIVE PAGE
# ================================
def fetch_active(headers, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
"state": "ACTIVE",
}
payload = {
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json().get("data", {}).get("requestsResponse", {})
return data.get("patientRequests", []), data.get("count", 0)
# ================================
# 🧠 MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
print(f"\n=== Sync ACTIVE požadavků @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
offset = 0
total_processed = 0
total_count = None
while True:
batch, count = fetch_active(headers, offset)
if total_count is None:
total_count = count
print(f"📡 Celkem ACTIVE v Medevio: {count}")
if not batch:
break
for r in batch:
upsert(conn, r)
total_processed += len(batch)
print(f"{total_processed}/{total_count} ACTIVE processed")
if offset + BATCH_SIZE >= count:
break
offset += BATCH_SIZE
time.sleep(0.4)
conn.close()
print("\n✅ ACTIVE sync hotovo!\n")
if __name__ == "__main__":
main()

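The while-loop above pages with an offset and stops once offset + BATCH_SIZE would reach the reported count. The same pattern as a small standalone generator, for illustration only; fetch_page stands in for fetch_active and is not part of the repository.

import time
from typing import Callable, Iterator, List, Tuple

def paginate(fetch_page: Callable[[int], Tuple[List[dict], int]],
             batch_size: int,
             delay: float = 0.4) -> Iterator[dict]:
    """Yield items page by page until the reported total count is exhausted."""
    offset = 0
    while True:
        batch, count = fetch_page(offset)
        for item in batch:
            yield item
        if not batch or offset + batch_size >= count:
            break
        offset += batch_size
        time.sleep(delay)  # be gentle to the API, mirroring the script above

if __name__ == "__main__":
    # Fake fetcher for demonstration: 7 items served in pages of 3,
    # returning the same (items, count) shape as fetch_active().
    data = [{"id": i} for i in range(7)]

    def fake_fetch(offset: int) -> Tuple[List[dict], int]:
        return data[offset:offset + 3], len(data)

    print(list(paginate(fake_fetch, batch_size=3, delay=0)))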
View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pymysql
import requests
from pathlib import Path
from datetime import datetime
from dateutil import parser
import time
import sys
# ================================
# UTF-8 SAFE OUTPUT (Windows friendly)
# ================================
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ================================
# 🔧 CONFIG
# ================================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
BATCH_SIZE = 500
STATES = ["ACTIVE", "DONE"]  # must be listed explicitly, otherwise the API returns only ACTIVE
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY = r"""
query ClinicRequestList2(
$clinicSlug: String!,
$queueId: String,
$queueAssignment: QueueAssignmentFilter!,
$state: PatientRequestState,
$pageInfo: PageInfo!,
$locale: Locale!
) {
requestsResponse: listPatientRequestsForClinic2(
clinicSlug: $clinicSlug,
queueId: $queueId,
queueAssignment: $queueAssignment,
state: $state,
pageInfo: $pageInfo
) {
count
patientRequests {
id
displayTitle(locale: $locale)
createdAt
updatedAt
doneAt
removedAt
extendedPatient {
name
surname
identificationNumber
}
lastMessage {
createdAt
}
}
}
}
"""
# ================================
# TOKEN
# ================================
def read_token(path: Path) -> str:
tok = path.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
# ================================
# DATETIME PARSER
# ================================
def to_mysql_dt(iso_str):
if not iso_str:
return None
try:
dt = parser.isoparse(iso_str)
if dt.tzinfo is None:
dt = dt.replace(tzinfo=datetime.now().astimezone().tzinfo)
return dt.astimezone().strftime("%Y-%m-%d %H:%M:%S")
except Exception:
return None
# ================================
# UPSERT
# ================================
def upsert(conn, r):
p = r.get("extendedPatient") or {}
api_updated = to_mysql_dt(r.get("updatedAt"))
msg_updated = to_mysql_dt((r.get("lastMessage") or {}).get("createdAt"))
final_updated = max(filter(None, [api_updated, msg_updated]), default=None)
sql = """
INSERT INTO pozadavky (
id, displayTitle, createdAt, updatedAt, doneAt, removedAt,
pacient_jmeno, pacient_prijmeni, pacient_rodnecislo
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
displayTitle=VALUES(displayTitle),
updatedAt=VALUES(updatedAt),
doneAt=VALUES(doneAt),
removedAt=VALUES(removedAt),
pacient_jmeno=VALUES(pacient_jmeno),
pacient_prijmeni=VALUES(pacient_prijmeni),
pacient_rodnecislo=VALUES(pacient_rodnecislo)
"""
vals = (
r.get("id"),
r.get("displayTitle"),
to_mysql_dt(r.get("createdAt")),
final_updated,
to_mysql_dt(r.get("doneAt")),
to_mysql_dt(r.get("removedAt")),
p.get("name"),
p.get("surname"),
p.get("identificationNumber"),
)
with conn.cursor() as cur:
cur.execute(sql, vals)
conn.commit()
# ================================
# FETCH PAGE (per state)
# ================================
def fetch_state(headers, state, offset):
variables = {
"clinicSlug": CLINIC_SLUG,
"queueId": None,
"queueAssignment": "ANY",
"state": state,
"pageInfo": {"first": BATCH_SIZE, "offset": offset},
"locale": "cs",
}
payload = {
"operationName": "ClinicRequestList2",
"query": GRAPHQL_QUERY,
"variables": variables,
}
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers)
r.raise_for_status()
data = r.json()["data"]["requestsResponse"]
return data.get("patientRequests", []), data.get("count", 0)
# ================================
# MAIN
# ================================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
safe_print(f"\n=== FULL Medevio READ-ALL sync @ {datetime.now():%Y-%m-%d %H:%M:%S} ===")
grand_total = 0
for state in STATES:
safe_print(f"\n🔁 STATE = {state}")
offset = 0
total = None
processed = 0
while True:
batch, count = fetch_state(headers, state, offset)
if total is None:
total = count
safe_print(f"📡 {state}: celkem {total}")
if not batch:
break
for r in batch:
upsert(conn, r)
processed += len(batch)
safe_print(f"{processed}/{total}")
offset += BATCH_SIZE
if offset >= count:
break
time.sleep(0.4)
grand_total += processed
conn.close()
safe_print(f"\n✅ HOTOVO celkem zpracováno {grand_total} požadavků\n")
# ================================
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,217 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Download and store Medevio questionnaires (userNote + eCRF) for all patient requests.
Uses the verified working query "GetPatientRequest2".
"""
import json
import requests
import pymysql
from datetime import datetime
from pathlib import Path
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# 🔧 CONFIGURATION (UPDATED TO 192.168.1.50)
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
GRAPHQL_URL = "https://api.medevio.cz/graphql"
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
# ==============================
# 🕒 DATETIME FIXER
# ==============================
def fix_datetime(dt_str):
if not dt_str:
return None
try:
return datetime.fromisoformat(dt_str.replace("Z", "").replace("+00:00", ""))
except Exception:
return None
# Optional filter
CREATED_AFTER = "2025-01-01"
# ==============================
# 🧮 HELPERS
# ==============================
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
if tok.startswith("Bearer "):
return tok.split(" ", 1)[1]
return tok
GRAPHQL_QUERY = r"""
query GetPatientRequest2($requestId: UUID!, $clinicSlug: String!, $locale: Locale!) {
request: getPatientRequest2(patientRequestId: $requestId, clinicSlug: $clinicSlug) {
id
displayTitle(locale: $locale)
createdAt
updatedAt
userNote
eventType
extendedPatient(clinicSlug: $clinicSlug) {
name
surname
identificationNumber
}
ecrfFilledData(locale: $locale) {
name
groups {
label
fields {
name
label
type
value
}
}
}
}
}
"""
def fetch_questionnaire(headers, request_id, clinic_slug):
payload = {
"operationName": "GetPatientRequest2",
"query": GRAPHQL_QUERY,
"variables": {
"requestId": request_id,
"clinicSlug": clinic_slug,
"locale": "cs",
},
}
r = requests.post(GRAPHQL_URL, json=payload, headers=headers, timeout=40)
if r.status_code != 200:
safe_print(f"❌ HTTP {r.status_code} for {request_id}: {r.text}")
return None
return r.json().get("data", {}).get("request")
def insert_questionnaire(cur, req):
if not req:
return
patient = req.get("extendedPatient") or {}
ecrf_data = req.get("ecrfFilledData")
created_at = fix_datetime(req.get("createdAt"))
updated_at = fix_datetime(req.get("updatedAt"))
cur.execute("""
INSERT INTO medevio_questionnaires (
request_id, created_at, updated_at, user_note, ecrf_json
)
VALUES (%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
updated_at = VALUES(updated_at),
user_note = VALUES(user_note),
ecrf_json = VALUES(ecrf_json),
updated_local = NOW()
""", (
req.get("id"),
created_at,
updated_at,
req.get("userNote"),
json.dumps(ecrf_data, ensure_ascii=False),
))
safe_print(f" 💾 Stored questionnaire for {patient.get('surname','')} {patient.get('name','')}")
# ==============================
# 🧠 MAIN
# ==============================
def main():
token = read_token(TOKEN_PATH)
headers = {
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
"Accept": "application/json",
}
conn = pymysql.connect(**DB_CONFIG)
# load list of requests from the table we just filled
with conn.cursor() as cur:
sql = """
SELECT id, pacient_jmeno, pacient_prijmeni, createdAt, updatedAt, questionnaireprocessed
FROM pozadavky
WHERE (questionnaireprocessed IS NULL OR questionnaireprocessed < updatedAt)
"""
if CREATED_AFTER:
sql += " AND createdAt >= %s"
cur.execute(sql, (CREATED_AFTER,))
else:
cur.execute(sql)
rows = cur.fetchall()
safe_print(f"📋 Found {len(rows)} requests needing questionnaire check.")
for i, row in enumerate(rows, 1):
req_id = row["id"]
safe_print(f"\n[{i}/{len(rows)}] 🔍 Fetching questionnaire for {req_id} ...")
req = fetch_questionnaire(headers, req_id, CLINIC_SLUG)
if not req:
safe_print(" ⚠️ No questionnaire data found.")
continue
with conn.cursor() as cur:
insert_questionnaire(cur, req)
cur.execute(
"UPDATE pozadavky SET questionnaireprocessed = NOW() WHERE id = %s",
(req_id,)
)
conn.commit()
time.sleep(0.6)
conn.close()
safe_print("\n✅ Done! All questionnaires stored in MySQL table `medevio_questionnaires`.")
if __name__ == "__main__":
main()

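insert_questionnaire() above upserts by request_id, so the table needs a primary or unique key on that column plus the updated_local bookkeeping column. A sketch under those assumptions; the types are guesses.

# Hypothetical sketch of the medevio_questionnaires table implied by the upsert
# above; request_id must be a primary or unique key, the types are guesses.
MEDEVIO_QUESTIONNAIRES_DDL = """
CREATE TABLE IF NOT EXISTS medevio_questionnaires (
    request_id    VARCHAR(64) NOT NULL PRIMARY KEY,
    created_at    DATETIME    NULL,
    updated_at    DATETIME    NULL,
    user_note     MEDIUMTEXT  NULL,
    ecrf_json     LONGTEXT    NULL,
    updated_local DATETIME    NULL
) CHARACTER SET utf8mb4;
"""

if __name__ == "__main__":
    print(MEDEVIO_QUESTIONNAIRES_DDL)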
View File

@@ -0,0 +1,147 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
import sys
# UTF-8 SAFE OUTPUT
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
pass
# ==============================
# CONFIG (.50)
# ==============================
TOKEN_PATH = Path("token.txt")
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
GRAPHQL_QUERY_MESSAGES = r"""
query UseMessages_ListMessages($requestId: String!, $updatedSince: DateTime) {
messages: listMessages(patientRequestId: $requestId, updatedSince: $updatedSince) {
id createdAt updatedAt readAt text type
sender { id name surname clinicId }
medicalRecord { id description contentType url downloadUrl createdAt updatedAt }
}
}
"""
def parse_dt(s):
if not s: return None
try: return datetime.fromisoformat(s.replace("Z", "+00:00"))
except Exception: return None
def read_token(path: Path) -> str:
return path.read_text(encoding="utf-8").strip().replace("Bearer ", "")
def main():
token = read_token(TOKEN_PATH)
headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
conn = pymysql.connect(**DB_CONFIG)
# 1. List of attachments already downloaded (to prevent duplicates)
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {r["attachment_id"] for r in cur.fetchall()}
# 2. List of requests to synchronize
with conn.cursor() as cur:
cur.execute("""
SELECT id, messagesProcessed FROM pozadavky
WHERE messagesProcessed IS NULL OR messagesProcessed < updatedAt
""")
rows = cur.fetchall()
print(f"📋 Počet požadavků k synchronizaci zpráv: {len(rows)}")
for i, row in enumerate(rows, 1):
req_id = row["id"]
updated_since = row["messagesProcessed"]
if updated_since:
updated_since = updated_since.replace(microsecond=0).isoformat() + "Z"
print(f"[{i}/{len(rows)}] Synchronizuji: {req_id}")
payload = {
"operationName": "UseMessages_ListMessages",
"query": GRAPHQL_QUERY_MESSAGES,
"variables": {"requestId": req_id, "updatedSince": updated_since}
}
try:
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
messages = r.json().get("data", {}).get("messages", []) or []
if messages:
with conn.cursor() as cur:
for msg in messages:
# Store the message
sender = msg.get("sender") or {}
sender_name = " ".join(filter(None, [sender.get("name"), sender.get("surname")]))
mr = msg.get("medicalRecord") or {}
cur.execute("""
INSERT INTO medevio_conversation (
id, request_id, sender_name, sender_id, sender_clinic_id,
text, created_at, read_at, updated_at,
attachment_url, attachment_description, attachment_content_type
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
ON DUPLICATE KEY UPDATE
text = VALUES(text), updated_at = VALUES(updated_at), read_at = VALUES(read_at)
""", (
msg.get("id"), req_id, sender_name, sender.get("id"), sender.get("clinicId"),
msg.get("text"), parse_dt(msg.get("createdAt")), parse_dt(msg.get("readAt")),
parse_dt(msg.get("updatedAt")), mr.get("downloadUrl") or mr.get("url"),
mr.get("description"), mr.get("contentType")
))
# Store the attachment (if it exists and we do not have it yet)
attachment_id = mr.get("id")
if attachment_id and attachment_id not in existing_ids:
url = mr.get("downloadUrl") or mr.get("url")
if url:
att_r = requests.get(url, timeout=30)
if att_r.status_code == 200:
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
filename, content_type, file_size, created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
""", (
req_id, attachment_id, "MESSAGE_ATTACHMENT",
url.split("/")[-1].split("?")[0], mr.get("contentType"),
len(att_r.content), parse_dt(msg.get("createdAt")), att_r.content
))
existing_ids.add(attachment_id)
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
else:
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET messagesProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
time.sleep(0.3)
except Exception as e:
print(f" ❌ Chyba u {req_id}: {e}")
conn.close()
print("\n🎉 Delta sync zpráv a příloh DOKONČEN")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Download all attachments for pozadavky where attachmentsProcessed IS NULL
Store them in MySQL table `medevio_downloads` on 192.168.1.50.
"""
import zlib
import json
import requests
import pymysql
from pathlib import Path
from datetime import datetime
import time
import sys
# Force UTF-8 output
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
pass
# ==============================
# 🛡 SAFE PRINT
# ==============================
def safe_print(text: str):
enc = sys.stdout.encoding or ""
if not enc or not enc.lower().startswith("utf"):
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# 🔧 CONFIGURATION (.50)
# ==============================
TOKEN_PATH = Path("token.txt")
CLINIC_SLUG = "mudr-buzalkova"
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
"cursorclass": pymysql.cursors.DictCursor,
}
CREATED_AFTER = "2024-12-01"
GRAPHQL_QUERY = r"""
query ClinicRequestDetail_GetPatientRequest2($requestId: UUID!) {
patientRequestMedicalRecords: listMedicalRecordsForPatientRequest(
attachmentTypes: [ECRF_FILL_ATTACHMENT, MESSAGE_ATTACHMENT, PATIENT_REQUEST_ATTACHMENT]
patientRequestId: $requestId
pageInfo: {first: 100, offset: 0}
) {
attachmentType
id
medicalRecord {
contentType
description
downloadUrl
id
url
visibleToPatient
}
}
}
"""
def extract_filename_from_url(url: str) -> str:
try:
return url.split("/")[-1].split("?")[0]
except Exception:
return "unknown_filename"
def read_token(p: Path) -> str:
tok = p.read_text(encoding="utf-8").strip()
return tok.split(" ", 1)[1] if tok.startswith("Bearer ") else tok
def main():
token = read_token(TOKEN_PATH)
headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
conn = pymysql.connect(**DB_CONFIG)
# 1. Load IDs of attachments that are already downloaded
with conn.cursor() as cur:
cur.execute("SELECT attachment_id FROM medevio_downloads")
existing_ids = {row["attachment_id"] for row in cur.fetchall()}
safe_print(f"✅ V databázi již máme {len(existing_ids)} příloh.")
# 2. Select the requests to process
sql = "SELECT id, pacient_prijmeni, pacient_jmeno, createdAt FROM pozadavky WHERE attachmentsProcessed IS NULL"
params = []
if CREATED_AFTER:
sql += " AND createdAt >= %s"
params.append(CREATED_AFTER)
with conn.cursor() as cur:
cur.execute(sql, params)
req_rows = cur.fetchall()
safe_print(f"📋 Počet požadavků ke stažení příloh: {len(req_rows)}")
for i, row in enumerate(req_rows, 1):
req_id = row["id"]
prijmeni = row.get("pacient_prijmeni") or "Neznamy"
created_date = row.get("createdAt") or datetime.now()
safe_print(f"\n[{i}/{len(req_rows)}] 🧾 {prijmeni} ({req_id})")
payload = {
"operationName": "ClinicRequestDetail_GetPatientRequest2",
"query": GRAPHQL_QUERY,
"variables": {"requestId": req_id},
}
try:
r = requests.post("https://api.medevio.cz/graphql", json=payload, headers=headers, timeout=30)
attachments = r.json().get("data", {}).get("patientRequestMedicalRecords", [])
if attachments:
with conn.cursor() as cur:
for a in attachments:
m = a.get("medicalRecord") or {}
att_id = a.get("id")
if att_id in existing_ids:
continue
url = m.get("downloadUrl")
if url:
att_r = requests.get(url, timeout=30)
if att_r.status_code == 200:
content = att_r.content
filename = extract_filename_from_url(url)
cur.execute("""
INSERT INTO medevio_downloads (
request_id, attachment_id, attachment_type,
filename, content_type, file_size,
created_at, file_content
) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)
""", (req_id, att_id, a.get("attachmentType"), filename,
m.get("contentType"), len(content), created_date, content))
existing_ids.add(att_id)
safe_print(f" 💾 Uloženo: {filename} ({len(content) / 1024:.1f} kB)")
conn.commit()
# Mark as processed even when no attachments were found
with conn.cursor() as cur:
cur.execute("UPDATE pozadavky SET attachmentsProcessed = NOW() WHERE id = %s", (req_id,))
conn.commit()
time.sleep(0.3)
except Exception as e:
print(f" ❌ Chyba u {req_id}: {e}")
conn.close()
safe_print("\n🎯 Všechny přílohy byly zpracovány.")
if __name__ == "__main__":
main()

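Both attachment paths in this diff write the same columns of medevio_downloads, the export script additionally reads d.id and d.created_at, and the delta-sync script's ON DUPLICATE KEY UPDATE implies a unique key on attachment_id. A hedged sketch of such a table; only the column names are taken from the code, the types and keys are assumptions.

# Hypothetical sketch of medevio_downloads; column names come from the INSERT
# and SELECT statements above, types and the UNIQUE key are assumptions
# (the UNIQUE key on attachment_id is what lets ON DUPLICATE KEY UPDATE dedupe).
MEDEVIO_DOWNLOADS_DDL = """
CREATE TABLE IF NOT EXISTS medevio_downloads (
    id              BIGINT       NOT NULL AUTO_INCREMENT PRIMARY KEY,
    request_id      VARCHAR(64)  NOT NULL,
    attachment_id   VARCHAR(64)  NOT NULL,
    attachment_type VARCHAR(64)  NULL,
    filename        VARCHAR(255) NULL,
    content_type    VARCHAR(255) NULL,
    file_size       BIGINT       NULL,
    created_at      DATETIME     NULL,
    downloaded_at   DATETIME     NULL,
    file_content    LONGBLOB     NULL,
    UNIQUE KEY uq_attachment (attachment_id),
    KEY idx_request (request_id)
) CHARACTER SET utf8mb4;
"""

if __name__ == "__main__":
    print(MEDEVIO_DOWNLOADS_DDL)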
View File

@@ -0,0 +1,252 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
import time
import sys
# Force UTF-8 output even under Windows Task Scheduler
try:
sys.stdout.reconfigure(encoding='utf-8')
sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
# Python < 3.7 fallback (not needed for you, but safe)
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
# ==============================
# 🛡 SAFE PRINT FOR CP1250 / EMOJI
# ==============================
def safe_print(text: str = ""):
enc = sys.stdout.encoding or ""
if not enc.lower().startswith("utf"):
# Strip emoji and characters outside BMP for Task Scheduler
text = ''.join(ch for ch in text if ord(ch) < 65536)
try:
print(text)
except UnicodeEncodeError:
# ASCII fallback
text = ''.join(ch for ch in text if ord(ch) < 128)
print(text)
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
"""Replace invalid filename characters with underscore."""
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
def make_abbrev(title: str) -> str:
if not title:
return ""
words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
abbr = ""
for w in words:
if w.isdigit():
abbr += w
else:
abbr += w[0]
return abbr.upper()
# ==============================
# 🧹 DELETE UNEXPECTED FILES
# ==============================
def clean_folder(folder: Path, valid_files: set):
if not folder.exists():
return
for f in folder.iterdir():
if f.is_file():
if f.name.startswith("▲"):
continue
sanitized = sanitize_name(f.name)
if sanitized not in valid_files:
safe_print(f"🗑️ Removing unexpected file: {f.name}")
try:
f.unlink()
except Exception as e:
safe_print(f"⚠️ Could not delete {f}: {e}")
# ==============================
# 📦 DB CONNECTION
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur_meta = conn.cursor(pymysql.cursors.DictCursor)
cur_blob = conn.cursor()
safe_print("🔍 Loading metadata from DB (FAST)…")
cur_meta.execute("""
SELECT d.id AS download_id,
d.request_id,
d.filename,
d.created_at,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni,
p.displayTitle
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC
""")
rows = cur_meta.fetchall()
safe_print(f"📋 Found {len(rows)} attachment records.\n")
# ==============================
# 🧠 MAIN LOOP WITH PROGRESS
# ==============================
unique_request_ids = []
seen = set()
for r in rows:
req_id = r["request_id"]
if req_id not in seen:
unique_request_ids.append(req_id)
seen.add(req_id)
total_requests = len(unique_request_ids)
safe_print(f"🔄 Processing {total_requests} unique requests...\n")
processed_requests = set()
current_index = 0
for r in rows:
req_id = r["request_id"]
if req_id in processed_requests:
continue
processed_requests.add(req_id)
current_index += 1
percent = (current_index / total_requests) * 100
safe_print(f"\n[ {percent:5.1f}% ] Processing request {current_index} / {total_requests} → {req_id}")
# ========== FETCH VALID FILENAMES ==========
cur_meta.execute(
"SELECT filename FROM medevio_downloads WHERE request_id=%s",
(req_id,)
)
valid_files = {sanitize_name(row["filename"]) for row in cur_meta.fetchall()}
# ========== BUILD FOLDER NAME ==========
updated_at = r["req_updated_at"] or datetime.now()
date_str = updated_at.strftime("%Y-%m-%d")
prijmeni = sanitize_name(r["prijmeni"] or "Unknown")
jmeno = sanitize_name(r["jmeno"] or "")
title = r.get("displayTitle") or ""
abbr = make_abbrev(title)
clean_folder_name = sanitize_name(
f"{date_str} {prijmeni}, {jmeno} [{abbr}] {req_id}"
)
# ========== DETECT EXISTING FOLDER ==========
existing_folder = None
for f in BASE_DIR.iterdir():
if f.is_dir() and req_id in f.name:
existing_folder = f
break
main_folder = existing_folder if existing_folder else BASE_DIR / clean_folder_name
# ========== MERGE DUPLICATES ==========
possible_dups = [
f for f in BASE_DIR.iterdir()
if f.is_dir() and req_id in f.name and f != main_folder
]
for dup in possible_dups:
safe_print(f"♻️ Merging duplicate folder: {dup.name}")
clean_folder(dup, valid_files)
main_folder.mkdir(parents=True, exist_ok=True)
for f in dup.iterdir():
if f.is_file():
target = main_folder / f.name
if not target.exists():
f.rename(target)
shutil.rmtree(dup, ignore_errors=True)
# ========== CLEAN MAIN FOLDER ==========
clean_folder(main_folder, valid_files)
# ========== DOWNLOAD MISSING FILES ==========
added_new_file = False
main_folder.mkdir(parents=True, exist_ok=True)
for filename in valid_files:
dest_plain = main_folder / filename
dest_marked = main_folder / ("▲" + filename)
if dest_plain.exists() or dest_marked.exists():
continue
added_new_file = True
cur_blob.execute(
"SELECT file_content FROM medevio_downloads "
"WHERE request_id=%s AND filename=%s",
(req_id, filename)
)
row = cur_blob.fetchone()
if not row:
continue
content = row[0]
if not content:
continue
with open(dest_plain, "wb") as f:
f.write(content)
safe_print(f"💾 Wrote: {dest_plain.relative_to(BASE_DIR)}")
# ========== REMOVE ▲ FLAG IF NEW FILES ADDED ==========
if added_new_file and "▲" in main_folder.name:
new_name = main_folder.name.replace("▲", "").strip()
new_path = main_folder.parent / new_name
if new_path != main_folder:
try:
main_folder.rename(new_path)
safe_print(f"🔄 Folder flag ▲ removed → {new_name}")
main_folder = new_path
except Exception as e:
safe_print(f"⚠️ Could not rename folder: {e}")
safe_print("\n🎯 Export complete.\n")
cur_blob.close()
cur_meta.close()
conn.close()

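For reference, how the two name helpers above behave on a couple of illustrative, made-up inputs (not real patient data); the helpers are restated here so the snippet runs on its own.

import re

def sanitize_name(name: str) -> str:
    """Replace invalid filename characters with underscore (same regex as above)."""
    return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()

def make_abbrev(title: str) -> str:
    """Behaves like make_abbrev above: first letter of each word, digits kept whole."""
    if not title:
        return ""
    words = re.findall(r"[A-Za-zÁ-Žá-ž0-9]+", title)
    abbr = ""
    for w in words:
        abbr += w if w.isdigit() else w[0]
    return abbr.upper()

if __name__ == "__main__":
    print(make_abbrev("Žádost o recept"))            # -> ŽOR
    print(make_abbrev("Kontrola 2024"))              # -> K2024
    print(sanitize_name('Novák, Jan [K2024] a/b'))   # -> Novák, Jan [K2024] a_b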
View File

@@ -0,0 +1,146 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import pymysql
import re
from pathlib import Path
from datetime import datetime
# ==============================
# ⚙️ CONFIGURATION
# ==============================
DB_CONFIG = {
"host": "192.168.1.50",
"port": 3306,
"user": "root",
"password": "Vlado9674+",
"database": "medevio",
"charset": "utf8mb4",
}
BASE_DIR = Path(r"u:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
BASE_DIR.mkdir(parents=True, exist_ok=True)
def sanitize_name(name: str) -> str:
return re.sub(r'[<>:"/\\|?*\x00-\x1F]', "_", name).strip()
def clean_folder(folder: Path, valid_files: set):
"""Remove files that do NOT exist in MySQL for this request."""
if not folder.exists():
return
for f in folder.iterdir():
if f.is_file() and sanitize_name(f.name) not in valid_files:
print(f"🗑️ Removing unexpected file: {f.name}")
try:
f.unlink()
except Exception as e:
print(f"⚠️ Cannot delete {f}: {e}")
# ==============================
# 📥 LOAD EVERYTHING IN ONE QUERY
# ==============================
conn = pymysql.connect(**DB_CONFIG)
cur = conn.cursor(pymysql.cursors.DictCursor)
print("📥 Loading ALL metadata + BLOBs with ONE MySQL query…")
cur.execute("""
SELECT
d.id AS download_id,
d.request_id,
d.filename,
d.file_content,
p.updatedAt AS req_updated_at,
p.pacient_jmeno AS jmeno,
p.pacient_prijmeni AS prijmeni
FROM medevio_downloads d
JOIN pozadavky p ON d.request_id = p.id
ORDER BY p.updatedAt DESC, d.created_at ASC
""")
rows = cur.fetchall()
print(f"📦 Loaded {len(rows)} total file rows.\n")
conn.close()
# ==============================
# 🔄 ORGANIZE ROWS PER REQUEST
# ==============================
requests = {} # req_id → list of file dicts
for r in rows:
req_id = r["request_id"]
if req_id not in requests:
requests[req_id] = []
requests[req_id].append(r)
print(f"📌 Unique requests: {len(requests)}\n")
# ==============================
# 🧠 MAIN LOOP SAME LOGIC AS BEFORE
# ==============================
for req_id, filelist in requests.items():
# ========== GET UPDATEDAT (same logic) ==========
any_row = filelist[0]
updated_at = any_row["req_updated_at"] or datetime.now()
date_str = updated_at.strftime("%Y-%m-%d")
prijmeni = sanitize_name(any_row["prijmeni"] or "Unknown")
jmeno = sanitize_name(any_row["jmeno"] or "")
folder_name = sanitize_name(f"{date_str} {prijmeni}, {jmeno} {req_id}")
main_folder = BASE_DIR / folder_name
# ========== VALID FILES ==========
valid_files = {sanitize_name(r["filename"]) for r in filelist}
# ========== FIND OLD FOLDERS ==========
possible_dups = [
f for f in BASE_DIR.iterdir()
if f.is_dir() and req_id in f.name and f != main_folder
]
# ========== MERGE OLD FOLDERS ==========
for dup in possible_dups:
print(f"♻️ Merging folder: {dup.name}")
clean_folder(dup, valid_files)
main_folder.mkdir(parents=True, exist_ok=True)
for f in dup.iterdir():
if f.is_file():
target = main_folder / f.name
if not target.exists():
f.rename(target)
shutil.rmtree(dup, ignore_errors=True)
# ========== CLEAN MAIN FOLDER ==========
main_folder.mkdir(parents=True, exist_ok=True)
clean_folder(main_folder, valid_files)
# ========== SAVE FILES (fast now) ==========
for r in filelist:
filename = sanitize_name(r["filename"])
dest = main_folder / filename
if dest.exists():
continue
content = r["file_content"]
if not content:
continue
with open(dest, "wb") as f:
f.write(content)
print(f"💾 Saved: {dest.relative_to(BASE_DIR)}")
print("\n🎯 Export complete.\n")

1
12 Tower1/token.txt Normal file
View File

@@ -0,0 +1 @@
nYvrvgflIKcDiQg8Hhpud+qG8iGZ8eH8su4nyT/Mgcm7XQp65ygY9s39+O01wIpk/7sKd6fBHkiKvsqH

View File

@@ -5,6 +5,7 @@ import requests
import mysql.connector import mysql.connector
from pathlib import Path from pathlib import Path
import sys import sys
from datetime import datetime
# UTF-8 handling # UTF-8 handling
try: try:
@@ -18,6 +19,11 @@ except:
TOKEN_PATH = Path("token.txt") TOKEN_PATH = Path("token.txt")
GRAPHQL_URL = "https://api.medevio.cz/graphql" GRAPHQL_URL = "https://api.medevio.cz/graphql"
# --- ZPRACOVÁNÍ ---
# Zadejte počet požadavků ke zpracování.
# 0 znamená zpracovat VŠECHNY nesynchronizované požadavky.
PROCESS_LIMIT = 10 # <-- Používáme PROCESS_LIMIT
# --- MySQL DB --- # --- MySQL DB ---
DB_CONFIG = { DB_CONFIG = {
"host": "192.168.1.76", "host": "192.168.1.76",
@@ -40,51 +46,86 @@ def read_token(p: Path) -> str:
# === DB Funkce === # === DB Funkce ===
def get_latest_open_request_id_from_db(): def get_requests_to_process_from_db(limit):
""" """
Získá ID, Titul, Jméno a Příjmení nejnovějšího otevřeného požadavku z MySQL. Získá seznam požadavků (ID, Titul, Jméno, Příjmení) k synchronizaci z MySQL.
Použije LIMIT, pokud limit > 0.
""" """
print("🔍 Připojuji se k MySQL a hledám ID nejnovějšího otevřeného požadavku...") if limit == 0:
print("🔍 Připojuji se k MySQL a hledám **VŠECHNY** nesynchronizované požadavky...")
else:
print(f"🔍 Připojuji se k MySQL a hledám **{limit}** nesynchronizovaných požadavků...")
requests_list = []
conn = None
try: try:
conn = mysql.connector.connect(**DB_CONFIG) conn = mysql.connector.connect(**DB_CONFIG)
cursor = conn.cursor() cursor = conn.cursor()
# SQL dotaz: Nyní vybíráme navíc jméno a příjmení pacienta # Základní SQL dotaz
query = """ query = """
SELECT id, displayTitle, pacient_jmeno, pacient_prijmeni SELECT id, displayTitle, pacient_jmeno, pacient_prijmeni
FROM pozadavky FROM pozadavky
WHERE doneAt IS NULL WHERE doneAt IS NULL
AND noteSyncedAt IS NULL
ORDER BY updatedAt DESC ORDER BY updatedAt DESC
LIMIT 1;
""" """
# Podmíněné přidání LIMIT klauzule
if limit > 0:
query += f"LIMIT {limit};"
else:
query += ";"
cursor.execute(query) cursor.execute(query)
result = cursor.fetchone() results = cursor.fetchall()
cursor.close() for result in results:
conn.close()
if result:
request_id, display_title, jmeno, prijmeni = result request_id, display_title, jmeno, prijmeni = result
print(f"✅ Nalezen požadavek ID: {request_id} (Titul: {display_title})") requests_list.append({
print(f" Pacient: **{prijmeni} {jmeno}**") # Vypíšeme pro snadnou kontrolu
return {
"id": request_id, "id": request_id,
"displayTitle": display_title, "displayTitle": display_title,
"jmeno": jmeno, "jmeno": jmeno,
"prijmeni": prijmeni "prijmeni": prijmeni
} })
print("❌ Nebyl nalezen žádný otevřený požadavek v DB.") cursor.close()
return None
if requests_list:
print(f"✅ Nalezeno {len(requests_list)} požadavků ke zpracování.")
else:
print("❌ Nebyl nalezen žádný nesynchronizovaný otevřený požadavek v DB.")
return requests_list
except mysql.connector.Error as err: except mysql.connector.Error as err:
print(f"❌ Chyba při připojení/dotazu MySQL: {err}") print(f"❌ Chyba při připojení/dotazu MySQL: {err}")
return None return []
finally:
if conn and conn.is_connected():
conn.close()
# === GraphQL Operace === def update_db_sync_time(request_id, conn):
# Tyto GraphQL dotazy jsou beze změny """Aktualizuje sloupec noteSyncedAt v tabulce pozadavky. Používá existující připojení."""
cursor = conn.cursor()
current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
update_query = """
UPDATE pozadavky
SET noteSyncedAt = %s
WHERE id = %s;
"""
cursor.execute(update_query, (current_time, request_id))
conn.commit()
cursor.close()
print(f" (DB: Čas synchronizace pro {request_id} uložen)")
# === GraphQL Operace (Beze Změny) ===
QUERY_GET_NOTE = r""" QUERY_GET_NOTE = r"""
query ClinicRequestNotes_Get($patientRequestId: String!) { query ClinicRequestNotes_Get($patientRequestId: String!) {
@@ -103,6 +144,14 @@ mutation ClinicRequestNotes_Update($noteInput: UpdateClinicPatientRequestNoteInp
} }
""" """
MUTATION_CREATE_NOTE = r"""
mutation ClinicRequestNotes_Create($noteInput: CreateClinicPatientRequestNoteInput!) {
createClinicPatientRequestNote(noteInput: $noteInput) {
id
}
}
"""
def gql(query, variables, token): def gql(query, variables, token):
"""Obecná funkce pro volání GraphQL endpointu.""" """Obecná funkce pro volání GraphQL endpointu."""
@@ -120,63 +169,92 @@ def gql(query, variables, token):
 def get_internal_note(request_id, token):
     """Získá jedinou interní poznámku (obsah a ID) pro daný požadavek."""
+    print(f"🔍 Načítám poznámku z Medevia k požadavku {request_id}...")
     data = gql(QUERY_GET_NOTE, {"patientRequestId": request_id}, token)
     notes = data.get("data", {}).get("notes", [])
-    return notes[0] if notes else None
+    if notes:
+        print("✅ Interní poznámka nalezena.")
+        return notes[0]
+    print(f"⚠️ Interní poznámka pro požadavek {request_id} neexistuje.")
+    return None


 def update_internal_note(note_id, new_content, token):
     """Aktualizuje obsah poznámky v Medeviu."""
     variables = {"noteInput": {"id": note_id, "content": new_content}}
+    print(f"📝 Odesílám aktualizaci poznámky {note_id}...")
     return gql(MUTATION_UPDATE_NOTE, variables, token)


+def create_internal_note(request_id, content, token):
+    """Vytvoří novou interní poznámku k požadavku v Medeviu."""
+    variables = {"noteInput": {"requestId": request_id, "content": content}}
+    return gql(MUTATION_CREATE_NOTE, variables, token)
+
+
 # === MAIN ===
 def main():
     token = read_token(TOKEN_PATH)
-    # 1. Zjistit ID a jméno pacienta z TVÉ DB
-    latest_request = get_latest_open_request_id_from_db()
-    if not latest_request:
+    # 1. Získat seznam ID požadavků ke zpracování (používáme PROCESS_LIMIT)
+    requests_to_process = get_requests_to_process_from_db(PROCESS_LIMIT)
+    if not requests_to_process:
         return
-    request_id = latest_request["id"]
-    # 2. Získat existující interní poznámku z Medevia
-    note = get_internal_note(request_id, token)
-    if not note:
-        return
-    note_id = note["id"]
-    old_content = note["content"] or ""
-    # 3. Vytvořit nový obsah (ID požadavku jako první řádek)
-    # Text, který vložíme na začátek
-    prepend_text = f"ID DB Synchronizace: {request_id}\n"
-    new_content = prepend_text + old_content
-    print("--- Nový obsah který odešlu (začátek) ---")
-    print(f"-> {prepend_text.strip()}")
-    print("------------------------------------------")
-    # 4. Aktualizovat poznámku v Medeviu
-    try:
-        update_internal_note(note_id, new_content, token)
-        print(f"\n✅ Úspěch! Poznámka {note_id} k požadavku {request_id} byla aktualizována v Medeviu.")
-        print(f" **Zkontroluj požadavek pacienta: {latest_request['prijmeni']} {latest_request['jmeno']}**")
-    except requests.exceptions.HTTPError as e:
-        print(f"\n❌ Chyba při aktualizaci Medevio API: {e}")
-    except Exception as e:
-        print(f"\n❌ Neočekávaná chyba: {e}")
+    # Pro update DB time otevřeme připojení jednou a použijeme ho v cyklu
+    conn = mysql.connector.connect(**DB_CONFIG)
+    print("\n=============================================")
+    print(f"START ZPRACOVÁNÍ {len(requests_to_process)} POŽADAVKŮ")
+    print("=============================================\n")
+    for idx, request in enumerate(requests_to_process, 1):
+        request_id = request["id"]
+        print(
+            f"[{idx}/{len(requests_to_process)}] Zpracovávám požadavek: {request['prijmeni']} {request['jmeno']} (ID: {request_id})")
+        # 2. Vytvořit text, který chceme přidat/vytvořit
+        prepend_text = f"ID: {request_id}\n"
+        # 3. Pokusit se získat existující interní poznámku z Medevia
+        note = get_internal_note(request_id, token)
+        medevio_update_success = False
+        if note:
+            # A) POZNÁMKA EXISTUJE -> AKTUALIZOVAT
+            note_id = note["id"]
+            old_content = note["content"] or ""
+            new_content = prepend_text + old_content
+            try:
+                # Odeslání aktualizace
+                update_internal_note(note_id, new_content, token)
+                print(f" (Medevio: Poznámka {note_id} **aktualizována**.)")
+                medevio_update_success = True
+            except requests.exceptions.HTTPError as e:
+                print(f" ❌ Chyba při aktualizaci Medevio API: {e}")
+        else:
+            # B) POZNÁMKA NEEXISTUJE -> VYTVOŘIT
+            new_content = prepend_text.strip()
+            try:
+                # Odeslání vytvoření
+                result = create_internal_note(request_id, new_content, token)
+                new_note_id = result.get("data", {}).get("createClinicPatientRequestNote", {}).get("id", "N/A")
+                print(f" (Medevio: Nová poznámka {new_note_id} **vytvořena**.)")
+                medevio_update_success = True
+            except requests.exceptions.HTTPError as e:
+                print(f" ❌ Chyba při vytváření Medevio API: {e}")
+        # 4. AKTUALIZACE ČASOVÉHO RAZÍTKA V DB
+        if medevio_update_success:
+            update_db_sync_time(request_id, conn)
+        print("---------------------------------------------")
+    # Uzavřeme připojení k DB po dokončení cyklu
+    if conn and conn.is_connected():
+        conn.close()
+    print("\n✅ Všechny požadavky zpracovány. Připojení k DB uzavřeno.")


 if __name__ == "__main__":
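The reworked main() above relies on two DB helpers that sit outside this hunk: get_requests_to_process_from_db() and update_db_sync_time(). The sketch below is only an assumption of what they might look like; the three keys main() reads (id, jmeno, prijmeni) come from the diff, while the pozadavky table name, the medevio_sync_at column and everything else are hypothetical placeholders.

import mysql.connector

def get_requests_to_process_from_db(limit):
    """Return up to `limit` requests that have not been synced yet.
    Table and column names are assumptions, not taken from this repo."""
    # DB_CONFIG is the module-level config this script already passes to mysql.connector.connect().
    conn = mysql.connector.connect(**DB_CONFIG)
    try:
        cur = conn.cursor(dictionary=True)
        cur.execute(
            "SELECT id, jmeno, prijmeni FROM pozadavky "
            "WHERE medevio_sync_at IS NULL ORDER BY id LIMIT %s",
            (limit,),
        )
        return cur.fetchall()
    finally:
        conn.close()

def update_db_sync_time(request_id, conn):
    """Stamp the request as synced, reusing the connection opened in main()."""
    cur = conn.cursor()
    cur.execute(
        "UPDATE pozadavky SET medevio_sync_at = NOW() WHERE id = %s",
        (request_id,),
    )
    conn.commit()
    cur.close()

Passing the already-open connection into update_db_sync_time() mirrors how the loop in main() reuses a single connection and only stamps the row after Medevio confirmed the update or creation.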

View File

@@ -0,0 +1 @@
{"cookies": [{"name": "gateway-access-token", "value": "YwBgkf8McREDKs7vCZj0EZD2fJsuV8RyDPtYx7WiDoz0nFJ9kxId8kcNEPBLFSwM+Tiz80+SOdFwo+oj", "domain": "my.medevio.cz", "path": "/", "expires": 1763372319, "httpOnly": false, "secure": false, "sameSite": "Lax"}, {"name": "aws-waf-token", "value": "b6a1d4eb-4350-40e5-8e52-1f5f9600fbb8:CgoAr9pC8c6zAAAA:OYwXLY5OyitSQPl5v2oIlS+hIxsrb5LxV4VjCyE2gJCFFE5PQu+0Zbxse2ZIofrNv5QKs0TYUDTmxPhZyTr9Qtjnq2gsVQxWHXzrbebv3Z7RbzB63u6Ymn3Fo8IbDev3CfCNcNuxCKltFEXLqSCjI2vqNY+7HZkgQBIqy2wMgzli3aSLq0w8lWYtZzyyot7q8RPXWMGTfaBUo2reY0SOSffm9rAivE9PszNfPid71CvNrGAAoxRbwb25eVujlyIcDVWe5vZ9Iw==", "domain": ".my.medevio.cz", "path": "/", "expires": 1761125920, "httpOnly": false, "secure": true, "sameSite": "Lax"}], "origins": [{"origin": "https://my.medevio.cz", "localStorage": [{"name": "awswaf_token_refresh_timestamp", "value": "1760780309860"}, {"name": "awswaf_session_storage", "value": "b6a1d4eb-4350-40e5-8e52-1f5f9600fbb8:CgoAr9pC8c+zAAAA:+vw//1NzmePjPpbGCJzUB+orCRivtJd098DbDX4AnABiGRw/+ql6ShqvFY4YdCY7w2tegb5mEPBdAmc4sNi22kNR9BuEoAgCUiMhkU1AZWfzM51zPfTh7SveCrREZ7xdvxcqKPMmfVLRYX5E4+UWh22z/LKQ7+d9VERp3J+wWCUW3dFFirkezy3N7b2FVjTlY/RxsZwhejQziTG/L3CkIFFP3mOReNgBvDpj7aKoM1knY4IL4TZ8E7zNv3nTsvzACLYvnUutVOUcofN1TfOzwZshSKsEXsMzrQn8PzLccX1jM5VSzce7gfEzl0zSPsT8NB3Sna+rhMIttDNYgvbW1HsfG2LIeKMR27Zf8hkslDRVVkcU/Kp2jLOEdhhrBKGjKY2o9/uX3NExdzh5MEKQSSRtmue01BpWYILPH23rMsz4YSmF+Ough5OeQoC95rkcYwVXMhwvUN9Zfp9UZ4xCNfFUex5dOrg9aJntYRnaceeocGUttNI5AdT0i3+osV6XHXzKxeqO8zLCS9BIsCzxaHfdqqem5DorMceuGKz+QqksatIQAA=="}, {"name": "Application.Intl.locale", "value": "cs"}, {"name": "Password.prefill", "value": "{\"username\":\"vladimir.buzalka@buzalka.cz\",\"type\":\"email\"}"}]}]}

View File

@@ -0,0 +1,46 @@
import os
from pathlib import Path
# Define the target directory
target_path = Path(r"U:\Dropbox\Ordinace\Dokumentace_ke_zpracování\MP")
def rename_folders():
# Ensure the path exists
if not target_path.exists():
print(f"Error: The path {target_path} does not exist.")
return
# Iterate through items in the directory
for folder in target_path.iterdir():
# Only process directories
if folder.is_dir():
original_name = folder.name
            # Check if the name starts with the triangle marker.
            # NOTE: the original marker glyph was lost in the text encoding; "▲" is assumed here.
            TRIANGLE = "▲"
            if original_name.startswith(TRIANGLE):
                # 1. Remove the triangle from the start
                name_without_tri = original_name[len(TRIANGLE):]
                # 2. Prepare the name to be at least 10 chars long
                #    (so the triangle can sit at index 10 / position 11)
                clean_name = name_without_tri.ljust(10)
                # 3. Construct new name: first 10 chars + triangle + the rest
                new_name = clean_name[:10] + TRIANGLE + clean_name[10:]
# Remove trailing spaces if the original name was short
# but you don't want extra spaces at the very end
new_name = new_name.rstrip()
new_folder_path = folder.parent / new_name
try:
print(f"Renaming: '{original_name}' -> '{new_name}'")
folder.rename(new_folder_path)
except Exception as e:
print(f"Could not rename {original_name}: {e}")
if __name__ == "__main__":
rename_folders()
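The renaming rule above (drop the leading marker, pad the remainder to ten characters, re-insert the marker at position eleven, then trim trailing spaces) is easiest to check on a concrete value. Below is a self-contained sketch of the same transformation, again assuming the ▲ glyph for the marker that was lost from the original source.

TRIANGLE = "▲"  # assumed glyph, see the note in rename_folders()

def move_marker(name: str, marker: str = TRIANGLE, pos: int = 10) -> str:
    """Move a leading marker so it ends up at index `pos`, padding short names with spaces."""
    if not name.startswith(marker):
        return name
    rest = name[len(marker):].ljust(pos)
    return (rest[:pos] + marker + rest[pos:]).rstrip()

# "Novak Jan" has 9 characters, so one space of padding lands before the marker:
assert move_marker("▲Novak Jan") == "Novak Jan ▲"

Extracting the rule into a pure function like this also makes it easy to dry-run the rename on a list of folder names before touching the filesystem.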

315
dddddd.py Normal file
View File

@@ -0,0 +1,315 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
FAST FILE HASH INDEXER FOR UNRAID (BLAKE3 ONLY, ALL SHARES)
- HARDCODED SINGLE SHARE MODE
- SQL OPTIMIZATION
- STRICT MODE (NO TOLERANCE) - Updates DB on any mismatch
"""
import os
import pymysql
import socket
import platform
from blake3 import blake3
# ==============================
# ENV / HOST
# ==============================
HOSTNAME = socket.gethostname()
OS_NAME = platform.system()
# HARD-CODED HERE FOR TESTING:
# SCAN_ONLY_THIS = None #"#Fotky"
SCAN_ONLY_THIS = '#Library' # "#Fotky"
# ==============================
# CONFIG
# ==============================
EXCLUDED_SHARES = {"domains", "appdata", "system", "isos"}
# --- File size limits (bytes) ---
FILE_MIN_SIZE = 0
FILE_MAX_SIZE = 1024 * 1024 * 1024 * 1024  # 1 TB
DB_CONFIG = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "torrents",
"charset": "utf8mb4",
"autocommit": True,
}
CHUNK_SIZE = 4 * 1024 * 1024 # 4 MB
PRINT_SKIPPED = False
# ==============================
# HASH
# ==============================
def compute_blake3(path: str) -> bytes:
h = blake3()
with open(path, "rb") as f:
for chunk in iter(lambda: f.read(CHUNK_SIZE), b""):
h.update(chunk)
return h.digest()
# ==============================
# SHARE / PATH HELPERS
# ==============================
def get_user_shares():
if SCAN_ONLY_THIS:
path = f"/mnt/user/{SCAN_ONLY_THIS}"
if os.path.isdir(path):
print(f"🎯 SINGLE SHARE MODE ACTIVE: Scanning only '{SCAN_ONLY_THIS}'")
return [SCAN_ONLY_THIS]
else:
print(f"⚠️ ERROR: Requested share '{SCAN_ONLY_THIS}' not found in /mnt/user!")
return []
shares = []
if not os.path.exists("/mnt/user"):
return []
for name in os.listdir("/mnt/user"):
if name.startswith("."):
continue
if name in EXCLUDED_SHARES:
continue
path = f"/mnt/user/{name}"
if os.path.isdir(path):
shares.append(name)
return sorted(shares)
def find_physical_roots(shares):
roots = []
if not os.path.exists("/mnt"):
return []
for disk in os.listdir("/mnt"):
if not disk.startswith("disk"):
continue
for share in shares:
path = f"/mnt/{disk}/{share}"
if os.path.isdir(path):
roots.append((share, path))
return sorted(roots)
def logical_path_from_disk_path(disk_path: str) -> str:
if not disk_path.startswith("/mnt/disk"):
raise ValueError(f"Unexpected disk path: {disk_path}")
parts = disk_path.split("/", 3)
return f"/mnt/user/{parts[3]}"
def size_allowed(size: int) -> bool:
if FILE_MIN_SIZE is not None and size < FILE_MIN_SIZE:
return False
if FILE_MAX_SIZE is not None and size > FILE_MAX_SIZE:
return False
return True
# ==============================
# MAIN
# ==============================
def main():
print("🚀 BLAKE3 indexer starting", flush=True)
print(f"🖥 Host: {HOSTNAME} | OS: {OS_NAME}", flush=True)
if FILE_MIN_SIZE or FILE_MAX_SIZE:
print(f"📏 File size limits: min={FILE_MIN_SIZE} max={FILE_MAX_SIZE}", flush=True)
shares = get_user_shares()
if not shares:
print("❌ No user shares to index!", flush=True)
return
print("📦 User shares to index:", flush=True)
for s in shares:
print(f" - {s}", flush=True)
scan_roots = find_physical_roots(shares)
if not scan_roots:
print("❌ No physical disk roots found!", flush=True)
return
print("📂 Physical scan roots:", flush=True)
for _, path in scan_roots:
print(f" - {path}", flush=True)
try:
db = pymysql.connect(**DB_CONFIG)
cur = db.cursor()
        # === THE "DON'T THINK ABOUT IT" SWITCH ===
        # Forces the session to UTC so MySQL stops shifting times back and forth by an hour.
        # cur.execute("SET time_zone = '+00:00'")
        # =========================================
except Exception as e:
print(f"❌ Database connection failed: {e}")
return
print("📥 Loading already indexed files into memory...", flush=True)
    # === SQL OPTIMIZATION ===
if SCAN_ONLY_THIS:
search_pattern = f"/mnt/user/{SCAN_ONLY_THIS}%"
print(f"⚡ OPTIMIZATION: Fetching only DB records for '{search_pattern}'", flush=True)
cur.execute("""
SELECT full_path, file_size, UNIX_TIMESTAMP(mtime)
FROM file_md5_index
WHERE host_name = %s AND full_path LIKE %s
""", (HOSTNAME, search_pattern))
else:
cur.execute("""
SELECT full_path, file_size, UNIX_TIMESTAMP(mtime)
FROM file_md5_index
WHERE host_name = %s
""", (HOSTNAME,))
    # Load the results into a dict for fast lookups
    # Format: { "path": (size, mtime) }
indexed_map = {row[0]: (row[1], row[2]) for row in cur.fetchall()}
print(f"✅ Loaded {len(indexed_map):,} indexed entries", flush=True)
print("======================================", flush=True)
new_files = 0
skipped = 0
filtered = 0
seen_paths = set()
# --- SCAN ---
for share, scan_root in scan_roots:
for root, _, files in os.walk(scan_root):
for fname in files:
disk_path = os.path.join(root, fname)
try:
stat = os.stat(disk_path)
except OSError:
continue
size = stat.st_size
if not size_allowed(size):
filtered += 1
continue
logical_path = logical_path_from_disk_path(disk_path)
if logical_path in seen_paths:
continue
seen_paths.add(logical_path)
mtime = int(stat.st_mtime)
                # === STRICT CHECK (NO TOLERANCE) ===
                # Skip the file only if it exists in the DB and both size and mtime match exactly.
                # Anything else (even a 1-second time shift) counts as a change and gets re-hashed.
is_match = False
if logical_path in indexed_map:
db_size, db_mtime = indexed_map[logical_path]
if size == db_size and mtime == db_mtime:
is_match = True
if is_match:
skipped += 1
if PRINT_SKIPPED:
print(f"⏭ SKIP {logical_path}", flush=True)
continue
# ============================================
print(" NEW / UPDATED", flush=True)
print(f" File: {logical_path}", flush=True)
print(f" Size: {size:,} B", flush=True)
try:
b3 = compute_blake3(disk_path)
except Exception as e:
print(f"❌ BLAKE3 failed: {e}", flush=True)
continue
                # The upsert below also refreshes mtime to the value read from disk
cur.execute("""
INSERT INTO file_md5_index
(os_name, host_name, full_path, file_name, directory,
file_size, mtime, blake3)
VALUES (%s, %s, %s, %s, %s, %s, FROM_UNIXTIME(%s), %s)
ON DUPLICATE KEY UPDATE
file_size = VALUES(file_size),
mtime = VALUES(mtime),
blake3 = VALUES(blake3),
updated_at = CURRENT_TIMESTAMP
""", (
OS_NAME,
HOSTNAME,
logical_path,
fname,
os.path.dirname(logical_path),
size,
mtime,
b3,
))
new_files += 1
print(f" B3 : {b3.hex()}", flush=True)
print("--------------------------------------", flush=True)
print("======================================", flush=True)
print(f"✅ New / updated : {new_files}", flush=True)
print(f"⏭ Skipped : {skipped}", flush=True)
print(f"🚫 Size filtered: {filtered}", flush=True)
print("🏁 Script finished", flush=True)
# ==============================
    # DB CLEANUP: REMOVE DELETED FILES
# ==============================
print("🧹 Checking for deleted files in DB...", flush=True)
db_paths = set(indexed_map.keys())
deleted_paths = db_paths - seen_paths
    # Limit the cleanup to the current share (when single-share mode is active)
if SCAN_ONLY_THIS:
prefix = f"/mnt/user/{SCAN_ONLY_THIS}/"
deleted_paths = {p for p in deleted_paths if p.startswith(prefix)}
if deleted_paths:
print(f"🗑 Removing {len(deleted_paths):,} deleted files from DB", flush=True)
BATCH_SIZE = 1000
deleted_paths = list(deleted_paths)
for i in range(0, len(deleted_paths), BATCH_SIZE):
batch = deleted_paths[i:i + BATCH_SIZE]
placeholders = ",".join(["%s"] * len(batch))
sql = f"""
DELETE FROM file_md5_index
WHERE host_name = %s
AND full_path IN ({placeholders})
"""
cur.execute(sql, (HOSTNAME, *batch))
print("✅ DB cleanup completed", flush=True)
else:
print("✅ No deleted files found in DB", flush=True)
cur.close()
db.close()
if __name__ == "__main__":
main()
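The indexer's INSERT ... ON DUPLICATE KEY UPDATE, and its in-memory map keyed by full_path per host, only work if file_md5_index carries a unique key on (host_name, full_path). The DDL is not part of this change set, so the following is only a sketch consistent with the columns the script reads and writes (BINARY(32) because blake3().digest() is 32 bytes, DATETIME because mtime goes through FROM_UNIXTIME/UNIX_TIMESTAMP); the column sizes, the prefix index and the helper function are assumptions.

import pymysql

# Hypothetical schema for the table dddddd.py expects; the real DDL is not in this diff.
CREATE_FILE_MD5_INDEX = """
CREATE TABLE IF NOT EXISTS file_md5_index (
    id         BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
    os_name    VARCHAR(32)     NOT NULL,
    host_name  VARCHAR(128)    NOT NULL,
    full_path  VARCHAR(1024)   NOT NULL,
    file_name  VARCHAR(255)    NOT NULL,
    directory  VARCHAR(1024)   NOT NULL,
    file_size  BIGINT UNSIGNED NOT NULL,
    mtime      DATETIME        NOT NULL,
    blake3     BINARY(32)      NOT NULL,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    PRIMARY KEY (id),
    UNIQUE KEY uniq_host_path (host_name, full_path(255))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
"""

def ensure_schema(db_config: dict) -> None:
    """Create the table if it is missing; pass the indexer's DB_CONFIG."""
    conn = pymysql.connect(**db_config)
    try:
        with conn.cursor() as cur:
            cur.execute(CREATE_FILE_MD5_INDEX)
    finally:
        conn.close()

The prefix length on full_path keeps the unique key within MySQL's utf8mb4 index-size limit while still letting ON DUPLICATE KEY UPDATE match each (host, path) pair.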