Compare commits

..

11 Commits

Author SHA1 Message Date
fd2a811f38 reporter 2026-01-03 12:21:03 +01:00
d05e08c2b9 reporter 2025-12-07 07:11:33 +01:00
4c57c92332 reporter 2025-12-07 07:11:12 +01:00
49ede2b452 notebook 2025-12-04 06:57:13 +01:00
ab2f4256aa reporter 2025-11-30 19:37:24 +01:00
1347f7dcd7 notebook 2025-11-25 20:43:58 +01:00
d75930f6e9 notebook 2025-11-25 20:43:37 +01:00
db2fe7e990 notebook 2025-11-25 15:33:42 +01:00
9e6b3a8017 notebook 2025-11-25 15:33:31 +01:00
b813555dcc notbook 2025-11-06 07:07:29 +01:00
0375982137 notbook 2025-11-05 19:54:51 +01:00
67 changed files with 1258070 additions and 2 deletions

View File

@@ -0,0 +1,400 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import time
from datetime import date, timedelta
from pathlib import Path
import json
import requests
import mysql.connector
from mysql.connector import Error
from typing import Dict, Any, List
import hashlib
# ====================================================================
# A. Vynucení UTF-8 pro správnou diakritiku v plánovaných úlohách
# ====================================================================
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
"""
FIO MULTIACCOUNT IMPORTER — VERZE S ROBUSTNĚJŠÍM HANDLINGEM PK
===============================================================
- mysql.connector (Oracle) pro stabilní manipulaci s datovými typy
- Bezpečné generování id_operace, pokud chybí Column22
- Správné mapování id_pokynu = Column19
- Detailní logování chybných řádků
"""
# =========================================
# CONFIGURATION
# =========================================
# Path to the JSON list of accounts (name, account_number, API token).
ACCOUNTS_FILE = r"c:\users\vlado\PycharmProjects\FIO\accounts.json"
# Root folder where raw Fio JSON responses are archived, one subfolder per account.
JSON_BASE_DIR = r"z:\Dropbox\!!!Days\Downloads Z230\Fio"
# MySQL connection parameters.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# an environment variable or a config file kept out of version control.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}
# Rows per executemany() batch when inserting into MySQL.
BATCH_SIZE = 500
# How many days of history to request from the Fio API.
DAYS_BACK = 90
# Enables more detailed logging on insert errors (per-row retry of failed batches)
DEBUG_ON_ERROR = True
# =========================================
# HELPERS
# =========================================
def load_accounts(path: str) -> List[Dict[str, str]]:
    """Load the account list from *path* and validate that every entry
    carries the required keys (name, account_number, token).

    Raises ValueError for the first missing key found.
    """
    with open(path, "r", encoding="utf-8") as handle:
        entries = json.load(handle)
    required = ("name", "account_number", "token")
    for entry in entries:
        for key in required:
            if key not in entry:
                raise ValueError(f"Missing '{key}' in account config: {entry}")
    return entries
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
    """Build the Fio REST 'periods' endpoint URL for the given date range."""
    start = d_from.strftime("%Y-%m-%d")
    end = d_to.strftime("%Y-%m-%d")
    return (
        "https://fioapi.fio.cz/v1/rest/periods/"
        f"{token}/{start}/{end}/transactions.json"
    )
def fetch_fio_json(token: str, d_from: date, d_to: date) -> Any:
    """Download the transaction JSON for one account/period from the Fio API.

    Returns the decoded JSON on success, or ``None`` on any HTTP or decoding
    failure (errors are printed, never raised — callers skip the account).
    """
    url = fio_url_for_period(token, d_from, d_to)
    resp = requests.get(url, timeout=30)
    if resp.status_code != 200:
        print(f" ❌ HTTP {resp.status_code} from Fio: {url}", flush=True)
        return None
    try:
        return resp.json()
    except ValueError:
        # FIX: was `except json.JSONDecodeError`. resp.json() raises
        # requests.exceptions.JSONDecodeError, which subclasses
        # json.JSONDecodeError only on requests>=2.27 without simplejson
        # installed; on other setups it is a plain ValueError subclass and
        # would have escaped the old handler. ValueError covers all variants.
        print(" ❌ Cannot decode JSON from Fio response", flush=True)
        return None
def safe_col(t: dict, n: int) -> Any:
    """Return t["columnN"]["value"], or None when the column is absent/null."""
    entry = t.get(f"column{n}")
    return entry.get("value") if entry else None
def clean_date(dt_str: str) -> str:
    """Trim a Fio timestamp ("YYYY-MM-DD+HH:MM") to its "YYYY-MM-DD" prefix.

    Empty/None input yields None despite the ``-> str`` annotation — callers
    treat the result as optional.
    """
    return str(dt_str)[:10] if dt_str else None
def ensure_dir(path: Path):
    """Create *path* (with any missing parents) when it does not exist yet."""
    if not path.is_dir():
        path.mkdir(parents=True, exist_ok=True)
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date) -> Path:
    """Archive the raw Fio JSON under <base_dir>/<account>/<from>_to_<to>.json.

    The '/' in the account number is replaced by '_' so it is a valid folder
    name. Returns the path of the written file.
    """
    folder = Path(base_dir) / account_cfg["account_number"].replace("/", "_")
    folder.mkdir(parents=True, exist_ok=True)
    stamp = f"{d_from.strftime('%Y-%m-%d')}_to_{d_to.strftime('%Y-%m-%d')}"
    target = folder / f"{stamp}.json"
    with open(target, "w", encoding="utf-8") as handle:
        json.dump(data, handle, ensure_ascii=False, indent=2)
    return target
def generate_fallback_id(fio_acc_id: str, t: dict) -> str:
    """Deterministic surrogate primary key for rows missing Column22.

    Builds a SHA-1 over account id, date, amount, counter-account and VS,
    truncated to 20 hex chars so it fits the VARCHAR(20) PK column.
    """
    parts = (
        fio_acc_id,
        clean_date(safe_col(t, 0)) or "",
        str(safe_col(t, 1) or ""),
        str(safe_col(t, 2) or ""),
        str(safe_col(t, 5) or ""),
    )
    digest = hashlib.sha1("|".join(parts).encode("utf-8")).hexdigest()
    return digest[:20]
# =========================================
# MAIN IMPORT LOGIC
# =========================================
def main():
    """Import the last DAYS_BACK days of transactions for every configured
    account: download JSON from the Fio API, archive it to disk, then upsert
    the rows into the MySQL `transactions` table.
    """
    start_all = time.time()
    today = date.today()
    d_from = today - timedelta(days=DAYS_BACK)
    d_to = today
    print("=== Fio multi-account import v3 (PK fix, lepší logování) ===", flush=True)
    print(f"Období: {d_from}{d_to}", flush=True)
    # Load all accounts from accounts.json
    try:
        accounts = load_accounts(ACCOUNTS_FILE)
    except Exception as e:
        print(f"FATÁLNÍ CHYBA při načítání účtů: {e}", flush=True)
        return
    print(f" Účtů v konfiguraci: {len(accounts)}\n", flush=True)
    # Connect to the database
    try:
        conn = mysql.connector.connect(
            host=DB["host"],
            port=DB["port"],
            user=DB["user"],
            password=DB["password"],
            database=DB["database"],
            charset=DB["charset"],
        )
        cur = conn.cursor()
    except Error as e:
        print(f"FATÁLNÍ CHYBA při připojení k DB: {e}", flush=True)
        return
    # Upsert statement matching the current table structure; ON DUPLICATE KEY
    # refreshes every non-PK column when id_operace already exists.
    sql = """
    INSERT INTO transactions
    (
        id_operace, cislo_uctu, transaction_date, amount, currency,
        protiucet, kod_banky, nazev_protiuctu, nazev_banky, typ,
        vs, ks, ss, uziv_identifikace, zprava_pro_prijemce,
        provedl, id_pokynu, komentar, upr_objem_mena, api_bic, reference_platce
    )
    VALUES
    (
        %(id_operace)s, %(cislo_uctu)s, %(transaction_date)s, %(amount)s, %(currency)s,
        %(protiucet)s, %(kod_banky)s, %(nazev_protiuctu)s, %(nazev_banky)s, %(typ)s,
        %(vs)s, %(ks)s, %(ss)s, %(uziv_identifikace)s, %(zprava_pro_prijemce)s,
        %(provedl)s, %(id_pokynu)s, %(komentar)s, %(upr_objem_mena)s, %(api_bic)s, %(reference_platce)s
    )
    ON DUPLICATE KEY UPDATE
        cislo_uctu = VALUES(cislo_uctu),
        transaction_date = VALUES(transaction_date),
        amount = VALUES(amount),
        currency = VALUES(currency),
        protiucet = VALUES(protiucet),
        kod_banky = VALUES(kod_banky),
        nazev_protiuctu = VALUES(nazev_protiuctu),
        nazev_banky = VALUES(nazev_banky),
        typ = VALUES(typ),
        vs = VALUES(vs),
        ks = VALUES(ks),
        ss = VALUES(ss),
        uziv_identifikace = VALUES(uziv_identifikace),
        zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
        provedl = VALUES(provedl),
        id_pokynu = VALUES(id_pokynu),
        komentar = VALUES(komentar),
        upr_objem_mena = VALUES(upr_objem_mena),
        api_bic = VALUES(api_bic),
        reference_platce = VALUES(reference_platce)
    """
    total_inserted = 0
    total_skipped_pk = 0
    total_skipped_error = 0
    # Process each account in accounts.json
    for acc in accounts:
        name = acc["name"]
        cfg_acc_num = acc["account_number"]
        token = acc["token"]
        print(f"--- Účet: {name} ({cfg_acc_num}) ---", flush=True)
        t0 = time.time()
        # 1) Download JSON from Fio API
        data = fetch_fio_json(token, d_from, d_to)
        if data is None:
            print(" Přeskakuji, žádná data / chyba API.\n", flush=True)
            continue
        # 2) Save raw JSON file to disk (failure here is non-fatal)
        try:
            json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
            print(f" JSON uložen do: {json_path}", flush=True)
        except Exception as e:
            print(f" ❌ Chyba při ukládání JSON souboru: {e}", flush=True)
        # 3) Extract transactions from the JSON tree
        tlist = data.get("accountStatement", {}).get("transactionList", {}).get("transaction", [])
        if isinstance(tlist, dict):
            # A single transaction may arrive as a bare object instead of a list
            tlist = [tlist]
        print(f" Počet transakcí v období: {len(tlist)}", flush=True)
        if not tlist:
            print(" Žádné transakce, jdu dál.\n", flush=True)
            continue
        fio_acc_id = data.get("accountStatement", {}).get("info", {}).get("accountId")
        # Warn when the accountId reported by Fio does not match the configured number
        if cfg_acc_num and fio_acc_id and cfg_acc_num.split("/")[0] not in fio_acc_id:
            print(
                f" ⚠ Upozornění: accountId z Fio ({fio_acc_id}) "
                f"se neshoduje s account_number v konfiguraci ({cfg_acc_num})",
                flush=True,
            )
        # 4) Build list of MySQL rows
        rows = []
        skipped_pk_account = 0
        for t in tlist:
            # id_operace = Column22 (the movement id, used as primary key)
            id_operace_val = safe_col(t, 22)
            # When missing, derive a stable hash fallback that fits VARCHAR(20)
            if id_operace_val is None:
                fallback = generate_fallback_id(fio_acc_id or "", t)
                id_operace_val = fallback
                # Uncomment to trace where the fallback id is used
                # print(f" ⚠ Fallback id_operace (hash) pro transakci: {fallback}", flush=True)
            # Defensive: without a PK the write is pointless; the fallback
            # should always cover this
            if id_operace_val is None:
                skipped_pk_account += 1
                continue
            transaction_date = clean_date(safe_col(t, 0))
            if not transaction_date:
                # The insert would fail anyway (NOT NULL column), so skip early
                if DEBUG_ON_ERROR:
                    print(f" ⚠ Přeskakuji transakci bez data, id_operace={id_operace_val}", flush=True)
                skipped_pk_account += 1
                continue
            id_pokynu_val = safe_col(t, 19)  # instruction id = Column19
            # Map remaining Fio columns onto the DB schema
            row = {
                "id_operace": str(id_operace_val),
                "cislo_uctu": fio_acc_id,
                "transaction_date": transaction_date,
                "amount": safe_col(t, 1),
                "currency": safe_col(t, 14),
                "typ": safe_col(t, 8),
                "provedl": safe_col(t, 9),
                "protiucet": safe_col(t, 2),
                "kod_banky": safe_col(t, 3),
                "nazev_protiuctu": safe_col(t, 10),
                "nazev_banky": safe_col(t, 12),
                "api_bic": safe_col(t, 26),
                "vs": safe_col(t, 5),
                "ks": safe_col(t, 4),
                "ss": safe_col(t, 6),
                "zprava_pro_prijemce": safe_col(t, 16),
                "uziv_identifikace": safe_col(t, 7),
                "komentar": safe_col(t, 25),
                "upr_objem_mena": safe_col(t, 18),
                "id_pokynu": str(id_pokynu_val) if id_pokynu_val is not None else None,
                "reference_platce": safe_col(t, 27),
            }
            rows.append(row)
        if skipped_pk_account:
            print(f" ⚠ Přeskočeno {skipped_pk_account} transakcí kvůli chybějícímu/invalidnímu PK nebo datu.", flush=True)
            total_skipped_pk += skipped_pk_account
        # 5) Insert rows into MySQL in batches
        inserted = 0
        skipped_error_account = 0
        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i: i + BATCH_SIZE]
            try:
                cur.executemany(sql, chunk)
                conn.commit()
                inserted += len(chunk)
            except Error as e:
                print(f" ❌ Chyba při zápisu batch do DB: {e}", flush=True)
                conn.rollback()
                if DEBUG_ON_ERROR:
                    # Retry row-by-row so the offending record can be reported
                    print(" ► Přecházím na per-row insert pro detail chyb...", flush=True)
                    for row in chunk:
                        try:
                            cur.execute(sql, row)
                            conn.commit()
                            inserted += 1
                        except Error as e_row:
                            skipped_error_account += 1
                            conn.rollback()
                            print(
                                f" ✗ Chybná transakce id_operace={row.get('id_operace')} "
                                f"datum={row.get('transaction_date')} částka={row.get('amount')} "
                                f"{e_row}",
                                flush=True,
                            )
        elapsed = time.time() - t0
        total_inserted += inserted
        total_skipped_error += skipped_error_account
        print(
            f" ✓ Zapsáno (insert/update): {inserted} řádků do DB "
            f"(přeskočeno chybějící PK/dat {skipped_pk_account}, chybou insertu {skipped_error_account}) "
            f"za {elapsed:.2f} s\n",
            flush=True,
        )
    # Close DB
    cur.close()
    conn.close()
    total_elapsed = time.time() - start_all
    print(
        f"=== Hotovo. Celkem zapsáno {total_inserted} transakcí. "
        f"Přeskočeno kvůli PK/datům: {total_skipped_pk}, kvůli chybě insertu: {total_skipped_error}. "
        f"Celkový čas: {total_elapsed:.2f} s ===",
        flush=True,
    )
# ======================================================
# ENTRY POINT
# ======================================================
if __name__ == "__main__":
    # Run the import when executed as a script (e.g. from Task Scheduler).
    main()

View File

@@ -0,0 +1,319 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
"""
FIO EXPORT SCRIPT — FINÁLNÍ VERZE S KONTROLNÍM VÝPISEM
-------------------------------------------
Skript nyní vypisuje POČET ŘÁDKŮ NAČTENÝCH Z DATABÁZE pro každý list.
Tím ověříme, zda se data ztrácí při čtení z DB, nebo až při exportu do Excelu.
"""
import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime, date as dt_date
from decimal import Decimal
import os
import glob
import json
from typing import List, Dict, Any
# ======================================================
# CONFIGURATION
# ======================================================
# MySQL server parameters
# MySQL server parameters
# NOTE(review): credentials are hard-coded in source — consider moving them
# to an environment variable or a config file kept out of version control.
DB_HOST = "192.168.1.76"
DB_PORT = 3307
DB_USER = "root"
DB_PASS = "Vlado9674+"
DB_NAME = "fio"
# Where to save Excel files
OUTPUT_DIR = r"Z:\Dropbox\!!!Days\Downloads Z230"
# JSON file with list of accounts (name + account_number)
ACCOUNTS_JSON = r"C:\Users\vlado\PycharmProjects\FIO\accounts.json"
# Columns exported as ="value" so Excel keeps them as text
# (preserves leading zeros and long digit strings).
TEXT_COLUMNS = [
    "cislo_uctu",
    "protiucet",
    "kod_banky",
    "vs",
    "ks",
    "ss",
    "id_operace",
    "id_pokynu"
]
# ======================================================
# REMOVE OLD EXPORT FILES (Beze změny)
# ======================================================
def cleanup_old_exports():
    """Delete older exported XLSX files matching the FIO transaction patterns.

    Best-effort: a file that cannot be removed (locked by Excel, permissions)
    is reported and skipped instead of aborting the export.
    """
    patterns = [
        os.path.join(OUTPUT_DIR, "*FIO*transaction*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO*transactions*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO_transactions*.xlsx"),
    ]
    for pattern in patterns:
        for file in glob.glob(pattern):
            try:
                os.remove(file)
                print(f"🗑 Deleted old export: {file}")
            except OSError as e:
                # FIX: was a bare `except: pass`, which silently swallowed
                # everything (including KeyboardInterrupt/SystemExit). Only
                # filesystem errors are expected here; report them.
                print(f"⚠ Could not delete {file}: {e}")
# ======================================================
# CORE EXCEL FORMATTING FUNCTION (Oprava konverze typů zachována)
# ======================================================
def format_sheet(ws, rows: List[Dict[str, Any]], headers: List[str]):
    """Write *rows* under the existing header row of *ws* and apply all
    house formatting rules (header fill, amount-based row colouring, fixed
    widths, borders, freeze panes, autofilter).

    Values are converted to native Python types first (Decimal → float,
    date → ISO string) because openpyxl cannot serialise MySQL's types;
    ID-like columns (TEXT_COLUMNS) are written as `="value"` so Excel keeps
    them as text.
    """
    from openpyxl.utils import get_column_letter  # valid names past column 'Z'
    # -------------------------------
    # 1) Format HEADER row
    # -------------------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")
    # -------------------------------
    # 2) Write DATA rows
    # -------------------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]
            # Convert MySQL/Decimal objects to native Python types
            if isinstance(val, Decimal):
                val = float(val)
            elif isinstance(val, dt_date) and not isinstance(val, datetime):
                val = val.strftime("%Y-%m-%d")
            # Text-sensitive columns (IDs, symbols) are written as ="value"
            if h in TEXT_COLUMNS and val is not None:
                val = str(val)
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)
        ws.append(excel_row)
    # -------------------------------
    # 3) Background colouring by "amount" (red = negative, green otherwise)
    # -------------------------------
    fill_red = PatternFill(start_color="FFFFDDDD", end_color="FFFFDDDD", fill_type="solid")
    fill_green = PatternFill(start_color="FFEEFFEE", end_color="FFEEFFEE", fill_type="solid")
    try:
        amount_col_index = headers.index("amount") + 1
    except ValueError:
        amount_col_index = -1
    if amount_col_index != -1:
        for row_idx in range(2, len(rows) + 2):
            cell_amount = ws.cell(row=row_idx, column=amount_col_index)
            try:
                # strip('="') removes the ="..." text wrapper before parsing
                value = float(str(cell_amount.value).strip('="'))
            except (TypeError, ValueError):
                # FIX: was a bare `except:` — only conversion errors expected
                value = 0
            fill = fill_red if value < 0 else fill_green
            for col_idx in range(1, len(headers) + 1):
                ws.cell(row=row_idx, column=col_idx).fill = fill
    # -------------------------------
    # 4) Fixed column widths (22 predefined, 15 for any extras)
    # -------------------------------
    fixed_widths = [
        13, 14, 11, 14, 8, 14, 11, 30, 30, 25,
        13, 13, 13, 35, 30, 15, 13, 30, 20, 13,
        30, 20
    ]
    if len(fixed_widths) < len(headers):
        fixed_widths.extend([15] * (len(headers) - len(fixed_widths)))
    for i, width in enumerate(fixed_widths, start=1):
        # FIX: `chr(64 + i)` produced invalid column names past column 26
        # ('[', '\\', ...); get_column_letter yields AA, AB, ... correctly.
        col_letter = get_column_letter(i)
        ws.column_dimensions[col_letter].width = width
    # -------------------------------
    # 5) Borders + centre alignment for ID/date/symbol columns
    # -------------------------------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")
    total_rows = len(rows) + 1
    total_cols = len(headers)
    ALIGN_CENTER_COLS = ["id_operace", "transaction_date", "currency", "kod_banky", "vs", "ks", "ss"]
    center_indices = [headers.index(col) + 1 for col in ALIGN_CENTER_COLS if col in headers]
    for row_idx in range(1, total_rows + 1):
        for col_idx in range(1, total_cols + 1):
            cell = ws.cell(row=row_idx, column=col_idx)
            cell.border = border
            if col_idx in center_indices:
                cell.alignment = align_center
    ws.freeze_panes = "A2"
    ws.auto_filter.ref = ws.dimensions
# ======================================================
# MAIN EXPORT PROCESS
# ======================================================
def export_fio():
    """Export all transactions into one XLSX workbook: an "ALL" sheet plus
    one sheet per configured account, with the preferred accounts first.

    Prints the DB row count for every sheet so data loss can be traced to
    either the DB read or the Excel write.
    """
    print("Connecting to MySQL...")
    # Connect to MySQL database
    try:
        conn = mysql.connector.connect(
            host=DB_HOST,
            port=DB_PORT,
            user=DB_USER,
            password=DB_PASS,
            database=DB_NAME
        )
    except Error as e:
        print("❌ Failed to connect:", e)
        return
    # dictionary=True → rows come back as {column: value} dicts
    cur = conn.cursor(dictionary=True)
    # Load accounts.json
    with open(ACCOUNTS_JSON, "r", encoding="utf-8") as f:
        accounts = json.load(f)
    # Sheets for these account names come first, in this order
    preferred_order = [
        "CZK rodina",
        "CZK ordinace",
        "CZK na jídlo",
        "CZK TrialHelp",
        "CZK maminka svojě věci"
    ]
    accounts_sorted = []
    # Step 1: add priority accounts first
    for pref in preferred_order:
        for acc in accounts:
            if acc["name"] == pref:
                accounts_sorted.append(acc)
    # Step 2: add remaining accounts afterward
    for acc in accounts:
        if acc not in accounts_sorted:
            accounts_sorted.append(acc)
    # Create a new Excel workbook
    wb = Workbook()
    wb.remove(wb.active)  # remove default empty sheet
    # FIRST SHEET: ALL TRANSACTIONS
    cur.execute("SELECT * FROM transactions ORDER BY transaction_date DESC")
    all_rows = cur.fetchall()
    if all_rows:
        headers = list(all_rows[0].keys())
        print(f"➡ Sheet: ALL | Řádků z DB: {len(all_rows)}")
        ws_all = wb.create_sheet(title="ALL")
        ws_all.append(headers)
        format_sheet(ws_all, all_rows, headers)
    # INDIVIDUAL SHEETS PER ACCOUNT
    for acc in accounts_sorted:
        acc_num = acc["account_number"]
        sheet_name = acc["name"][:31]  # Excel sheet name limit
        print(f"➡ Creating sheet: {sheet_name}", end=' | ')
        # FIX: the account number is now bound as a query parameter instead of
        # being interpolated into the SQL string — avoids SQL injection and
        # quoting bugs if a number ever contains a quote character.
        query = """
            SELECT *
            FROM transactions
            WHERE cislo_uctu = %s
            ORDER BY transaction_date DESC
        """
        cur.execute(query, (acc_num,))
        rows = cur.fetchall()
        print(f"Řádků z DB: {len(rows)}")
        if not rows:
            print(f"⚠ No data for {sheet_name}")
            continue
        headers = list(rows[0].keys())
        ws = wb.create_sheet(title=sheet_name)
        ws.append(headers)
        format_sheet(ws, rows, headers)
    conn.close()
    # Save Excel file (purge old exports first; name carries a timestamp)
    cleanup_old_exports()
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    filename = f"{timestamp} FIO transactions.xlsx"
    output_file = os.path.join(OUTPUT_DIR, filename)
    wb.save(output_file)
    print(f"✅ Export complete:\n{output_file}")
# ======================================================
# MAIN ENTRY POINT
# ======================================================
if __name__ == "__main__":
    # Run the export when executed as a script.
    export_fio()

View File

@@ -0,0 +1,268 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
# UTF-8 console for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime, date as dt_date
from decimal import Decimal
from pathlib import Path
# ==============================
# DELETE OLD REPORTS (OPTION C)
# ==============================
def delete_all_old_reports(directory: Path):
    """Remove every previously generated ordinace transaction report found
    in *directory*; failures are reported but do not abort."""
    removed = 0
    for candidate in directory.glob("*fio ordinace transactions.xlsx"):
        try:
            candidate.unlink()
        except Exception as e:
            print(f"❌ Could not delete {candidate.name}: {e}")
        else:
            removed += 1
            print(f"🗑 Deleted old report: {candidate.name}")
    if removed == 0:
        print(" No old reports to delete.")
    else:
        print(f"✓ Deleted {removed} old reports.")
# ======================================================
# CONFIG
# ======================================================
# MySQL connection parameters.
# NOTE(review): credentials are hard-coded in source — consider externalising.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
}
# The ordinace (practice) bank account whose transactions are reported.
ORDINACE_ACCOUNT = "2800046620"
# Insurance companies to report: display name -> counter-account number.
REPORTOVAT = {
    "VZP": "1114007221",
    "VOZP": "2010009091",
    "ČPZP": "2054108761",
    "OZP": "2070101041",
    "ZPŠ": "2090309181",
    "ZPMV": "2112108031",
}
# Destination folder for the generated XLSX reports.
OUTPUT_DIR = Path(r"z:\Dropbox\Ordinace\Reporty")
# Columns exported as ="value" so Excel keeps them as text.
TEXT_COLUMNS = [
    "cislo_uctu", "protiucet", "kod_banky",
    "vs", "ks", "ss",
    "id_operace", "id_pokynu"
]
# ======================================================
# FORMAT APPLYING (copied from main report)
# ======================================================
def format_sheet(ws, rows, headers):
    """Apply the shared report formatting (copied from the main exporter):
    bold yellow header, amount-based row colouring, fixed widths, borders,
    freeze panes and autofilter. ID-like columns are written as ="value".
    """
    from openpyxl.utils import get_column_letter  # valid names past column 'Z'
    # ---------------------- HEADER -----------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")
    # ---------------------- DATA ROWS --------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]
            # Convert MySQL data types for openpyxl
            if isinstance(val, Decimal):
                val = float(val)
            elif isinstance(val, dt_date):
                # NOTE(review): unlike the main exporter this also matches
                # datetime (a date subclass) and would drop any time part —
                # confirm the column is a pure DATE.
                val = val.strftime("%Y-%m-%d")
            # For certain columns, force ="text"
            if h in TEXT_COLUMNS and val is not None:
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)
        ws.append(excel_row)
    # ---------------------- COLORING ---------------------
    fill_red = PatternFill(start_color="FFFFDDDD", fill_type="solid")
    fill_green = PatternFill(start_color="FFEEFFEE", fill_type="solid")
    try:
        amount_col = headers.index("amount") + 1
    except ValueError:
        amount_col = -1
    if amount_col != -1:
        for r in range(2, len(rows) + 2):
            cell = ws.cell(row=r, column=amount_col)
            try:
                value = float(str(cell.value).strip('="'))
            except (TypeError, ValueError):
                # FIX: was a bare `except:` — only conversion errors expected
                value = 0
            fill = fill_red if value < 0 else fill_green
            for c in range(1, len(headers) + 1):
                ws.cell(row=r, column=c).fill = fill
    # ---------------------- COLUMN WIDTHS -----------------
    fixed_widths = [
        13, 14, 11, 14, 8, 14, 11, 30, 30, 25,
        13, 13, 13, 35, 30, 15, 13, 30, 20, 13,
        30, 20
    ]
    if len(fixed_widths) < len(headers):
        fixed_widths.extend([15] * (len(headers) - len(fixed_widths)))
    for i, width in enumerate(fixed_widths, start=1):
        # FIX: `chr(64 + i)` breaks past column 26; use get_column_letter
        letter = get_column_letter(i)
        ws.column_dimensions[letter].width = width
    # ---------------------- BORDERS & ALIGNMENT ----------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")
    center_cols = ["id_operace", "transaction_date", "currency", "kod_banky", "vs", "ks", "ss"]
    center_indices = [headers.index(c) + 1 for c in center_cols if c in headers]
    total_rows = len(rows) + 1
    total_cols = len(headers)
    for r in range(1, total_rows + 1):
        for c in range(1, total_cols + 1):
            cell = ws.cell(row=r, column=c)
            cell.border = border
            if c in center_indices:
                cell.alignment = align_center
    ws.freeze_panes = "A2"
    ws.auto_filter.ref = ws.dimensions
# ======================================================
# EXPORT
# ======================================================
def export_ordinace():
    """Build the ordinace workbook: an "ALL ordinace" sheet, one sheet per
    insurance company and a summary sheet, then save it under a timestamped
    name in OUTPUT_DIR.
    """
    print("Connecting MySQL...")
    conn = mysql.connector.connect(**DB)
    cur = conn.cursor(dictionary=True)
    # Load ALL transactions for the ordinace account.
    # NOTE(review): SQL is built with f-strings; the values are module-level
    # constants so injection risk is low, but parameterized queries would be
    # safer and consistent with good practice.
    sql_all = f"""
        SELECT *
        FROM transactions
        WHERE cislo_uctu = '{ORDINACE_ACCOUNT}'
        ORDER BY transaction_date DESC;
    """
    cur.execute(sql_all)
    all_rows = cur.fetchall()
    if not all_rows:
        print("❌ No transactions found for ordinace account.")
        return
    headers = list(all_rows[0].keys())
    # Workbook
    wb = Workbook()
    wb.remove(wb.active)
    # --------------------- ALL sheet ---------------------
    ws_all = wb.create_sheet("ALL ordinace")
    ws_all.append(headers)
    format_sheet(ws_all, all_rows, headers)
    print(f"➡ ALL ordinace rows: {len(all_rows)}")
    # --------------------- INSURANCE sheets ---------------
    summary = []
    for name, acc in REPORTOVAT.items():
        print(f"➡ Pojišťovna {name} ({acc})")
        # NOTE(review): the extra OR-condition only affects counter-account
        # 2070101041 (OZP), keeping just its positive (incoming) amounts; for
        # every other insurer the first branch is always true — confirm this
        # special case is still intended.
        sql = f"""
            SELECT *
            FROM transactions
            WHERE cislo_uctu = '{ORDINACE_ACCOUNT}'
            AND (
                protiucet <> '2070101041'
                OR (protiucet = '2070101041' AND amount > 0)
            )
            AND protiucet = '{acc}'
            ORDER BY transaction_date DESC;
        """
        cur.execute(sql)
        rows = cur.fetchall()
        count = len(rows)
        summa = sum(float(r["amount"]) for r in rows) if rows else 0
        # Collect per-insurer totals for the summary sheet
        summary.append({
            "Pojišťovna": name,
            "Účet": acc,
            "Počet transakcí": count,
            "Součet": summa
        })
        if not rows:
            print(f" ⚠ No rows")
            continue
        ws = wb.create_sheet(name)
        ws.append(headers)
        format_sheet(ws, rows, headers)
        print(f"{count} rows, sum {summa:.2f}")
    # --------------------- SUMMARY sheet -----------------
    ws_s = wb.create_sheet("Přehled")
    ws_s.append(["Pojišťovna", "Účet", "Počet transakcí", "Součet Kč"])
    for row in summary:
        ws_s.append([
            row["Pojišťovna"],
            row["Účet"],
            row["Počet transakcí"],
            f"{row['Součet']:.2f}"
        ])
    # Save Excel (file name carries a timestamp)
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    out_file = OUTPUT_DIR / f"{timestamp} FIO ordinace transactions.xlsx"
    wb.save(out_file)
    print(f"\n✅ Export hotový:\n{out_file}")
# ======================================================
# MAIN
# ======================================================
if __name__ == "__main__":
    # Purge stale reports first so only the newest workbook remains on disk.
    delete_all_old_reports(OUTPUT_DIR)
    export_ordinace()

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Expenses Ordinace Report Generator (2025)
------------------------------------------
Reads JSON with tab definitions + SQL queries.
Creates one Excel workbook with multiple sheets.
Uniform formatting for all tabs.
Deletes old reports before saving the new one.
"""
import json
import pandas as pd
import pymysql
from datetime import datetime
from pathlib import Path
from openpyxl import Workbook
from openpyxl.styles import Font, Alignment, PatternFill, Border, Side
from openpyxl.utils.dataframe import dataframe_to_rows
# ==============================
# CONFIG
# ==============================
# JSON file with tab definitions (each tab: sheet name + SQL query).
JSON_TABS = r"expenses_tabs.json"
# MySQL connection parameters.
# NOTE(review): credentials are hard-coded in source — consider externalising.
MYSQL = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4"
}
# Destination folder for the generated reports; created at import time.
EXPORT_DIR = Path(r"z:\Dropbox\Ordinace\Reporty")
EXPORT_DIR.mkdir(exist_ok=True, parents=True)
# ==============================
# DELETE OLD REPORTS (OPTION C)
# ==============================
def delete_all_old_reports(directory: Path):
    """Delete every previously generated ordinace expense report in
    *directory*, reporting (but not raising on) individual failures."""
    victims = list(directory.glob("*fio ordinace expenses.xlsx"))
    deleted = 0
    for victim in victims:
        try:
            victim.unlink()
            deleted += 1
            print(f"🗑 Deleted old report: {victim.name}")
        except Exception as e:
            print(f"❌ Could not delete {victim.name}: {e}")
    if deleted:
        print(f"✓ Deleted {deleted} old reports.")
    else:
        print(" No old reports to delete.")
# ==============================
# FORMATTING HELPERS
# ==============================
def format_sheet(ws):
    """Style one worksheet: bold yellow header, thin borders everywhere,
    autofilter and auto-sized column widths (capped at 50)."""
    header_fill = PatternFill("solid", fgColor="FFF200")
    bold_font = Font(bold=True, color="000000")
    center_align = Alignment(horizontal="center", vertical="center")
    edge = Side(border_style="thin", color="000000")
    box = Border(left=edge, right=edge, top=edge, bottom=edge)
    # Autofilter only when there is at least one data row below the header
    if ws.max_row > 1:
        ws.auto_filter.ref = ws.dimensions
    # Width of each column = longest rendered value + padding, capped at 50
    for column_cells in ws.columns:
        widest = 0
        for cell in column_cells:
            try:
                widest = max(widest, len(str(cell.value)))
            except Exception:
                pass
        ws.column_dimensions[column_cells[0].column_letter].width = min(widest + 2, 50)
    # Header row styling
    for cell in ws[1]:
        cell.font = bold_font
        cell.fill = header_fill
        cell.alignment = center_align
        cell.border = box
    # Borders for every body cell
    for body_row in ws.iter_rows(min_row=2):
        for cell in body_row:
            cell.border = box
# ==============================
# MAIN
# ==============================
def main():
    """Run every SQL tab defined in JSON_TABS against MySQL and save one
    formatted workbook with a sheet per tab."""
    print("=== Expenses Ordinace Report (with cleanup) ===")
    # Load JSON tab definitions (name + SQL per tab)
    with open(JSON_TABS, "r", encoding="utf-8") as f:
        config = json.load(f)
    tabs = config.get("tabs", [])
    print(f"Loaded {len(tabs)} tab definitions.")
    # Connect DB
    # NOTE(review): pandas.read_sql with a raw PyMySQL connection emits a
    # UserWarning (only SQLAlchemy connectables are officially supported).
    conn = pymysql.connect(**MYSQL)
    # Prepare workbook
    wb = Workbook()
    wb.remove(wb.active)
    # Process each tab
    for tab in tabs:
        name = tab["name"]
        sql = tab["sql"]
        print(f"→ Running tab: {name}")
        df = pd.read_sql(sql, conn)
        df = df.fillna("")
        # Swap columns N (index 13) and O (index 14)
        # NOTE(review): relies on a fixed column order coming back from the
        # query — confirm indices 13/14 still hold after schema changes.
        cols = df.columns.tolist()
        if len(cols) >= 15:
            cols[13], cols[14] = cols[14], cols[13]
            df = df[cols]
        # Create sheet (Excel limits sheet names to 31 chars)
        sheet_name = name[:31]
        ws = wb.create_sheet(sheet_name)
        # Write DataFrame
        for row in dataframe_to_rows(df, index=False, header=True):
            ws.append(row)
        # Apply formatting
        format_sheet(ws)
    conn.close()
    # Delete older reports
    delete_all_old_reports(EXPORT_DIR)
    # Save new report (timestamped name)
    OUTFILE = EXPORT_DIR / f"{datetime.now():%Y-%m-%d %H-%M-%S} FIO ordinace expenses.xlsx"
    wb.save(OUTFILE)
    print(f"\n✔ Report generated:\n{OUTFILE}")
if __name__ == "__main__":
    # Run the report when executed as a script.
    main()

View File

@@ -0,0 +1,262 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import time
import json
from pathlib import Path
import hashlib
import mysql.connector
from mysql.connector import Error
# ====================================================================
# UTF-8 OUTPUT
# ====================================================================
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
# ====================================================================
# CONFIGURATION
# ====================================================================
# Root folder scanned recursively for previously archived Fio JSON exports.
BASE_DIR = Path(r"z:\Dropbox\!!!Days\Downloads Z230\Fio")
# MySQL connection parameters.
# NOTE(review): credentials are hard-coded in source — consider externalising.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}
# Rows per executemany() batch when inserting into MySQL.
BATCH_SIZE = 500
# When True, failed batches are retried row-by-row to pinpoint bad records.
DEBUG_ON_ERROR = True
# ====================================================================
# HELPERS
# ====================================================================
def safe_col(t: dict, n: int):
    """Fetch t["columnN"]["value"], tolerating missing or null columns."""
    entry = t.get(f"column{n}")
    if not entry:
        return None
    return entry.get("value")
def clean_date(dt_str: str):
    """Trim "YYYY-MM-DD+HH:MM" to the leading date part; empty/None → None."""
    return dt_str[:10] if dt_str else None
def generate_fallback_id(account_id: str, t: dict) -> str:
    """Stable 20-char SHA-1 surrogate id for rows lacking Column22 (movement id)."""
    fields = [
        account_id,
        clean_date(safe_col(t, 0)) or "",
        str(safe_col(t, 1) or ""),
        str(safe_col(t, 2) or ""),
        str(safe_col(t, 5) or ""),
    ]
    joined = "|".join(fields)
    return hashlib.sha1(joined.encode("utf-8")).hexdigest()[:20]
def load_json_file(p: Path):
    """Parse a JSON file; print a warning and return None on any failure."""
    try:
        return json.loads(p.read_text(encoding="utf-8"))
    except Exception as e:
        print(f" ❌ Nelze načíst JSON: {p}{e}")
        return None
# ====================================================================
# MAIN
# ====================================================================
def main():
    """Import every Fio JSON export found on disk into MySQL.

    Recursively scans BASE_DIR for *.json files, maps each transaction onto a
    row of the `transactions` table and upserts it (INSERT ... ON DUPLICATE
    KEY UPDATE, keyed by id_operace).  Transactions missing Column22 get a
    deterministic SHA-1 fallback id; transactions without a date are skipped.
    Side effects: MySQL writes and per-file console logging.  Returns None.
    """
    start_all = time.time()
    print("=== Fio HISTORICKÝ IMPORT (ze všech JSON na disku) ===\n", flush=True)
    print(f"Hledám JSON soubory v: {BASE_DIR}", flush=True)
    # Collect every *.json in BASE_DIR and all of its subdirectories.
    all_json_paths = list(BASE_DIR.rglob("*.json"))
    print(f"Nalezeno JSON souborů: {len(all_json_paths)}\n", flush=True)
    if not all_json_paths:
        print("Nenalezeny žádné JSON soubory. Konec.")
        return
    # One DB connection for the whole run.
    try:
        conn = mysql.connector.connect(
            host=DB["host"],
            port=DB["port"],
            user=DB["user"],
            password=DB["password"],
            database=DB["database"],
            charset=DB["charset"]
        )
        cur = conn.cursor()
    except Error as e:
        print(f"FATAL DB ERROR: {e}")
        return
    # Upsert: id_operace is the conflict key; every other column is refreshed
    # on conflict so re-importing the same file is idempotent.
    sql = """
    INSERT INTO transactions
    (
        cislo_uctu, id_operace, transaction_date, amount, currency,
        protiucet, kod_banky, nazev_protiuctu, nazev_banky, typ,
        vs, ks, ss, uziv_identifikace, zprava_pro_prijemce,
        provedl, id_pokynu, komentar, upr_objem_mena, api_bic, reference_platce
    )
    VALUES
    (
        %(cislo_uctu)s, %(id_operace)s, %(transaction_date)s, %(amount)s, %(currency)s,
        %(protiucet)s, %(kod_banky)s, %(nazev_protiuctu)s, %(nazev_banky)s, %(typ)s,
        %(vs)s, %(ks)s, %(ss)s, %(uziv_identifikace)s, %(zprava_pro_prijemce)s,
        %(provedl)s, %(id_pokynu)s, %(komentar)s, %(upr_objem_mena)s, %(api_bic)s, %(reference_platce)s
    )
    ON DUPLICATE KEY UPDATE
        transaction_date = VALUES(transaction_date),
        amount = VALUES(amount),
        currency = VALUES(currency),
        protiucet = VALUES(protiucet),
        kod_banky = VALUES(kod_banky),
        nazev_protiuctu = VALUES(nazev_protiuctu),
        nazev_banky = VALUES(nazev_banky),
        typ = VALUES(typ),
        vs = VALUES(vs),
        ks = VALUES(ks),
        ss = VALUES(ss),
        uziv_identifikace = VALUES(uziv_identifikace),
        zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
        provedl = VALUES(provedl),
        id_pokynu = VALUES(id_pokynu),
        komentar = VALUES(komentar),
        upr_objem_mena = VALUES(upr_objem_mena),
        api_bic = VALUES(api_bic),
        reference_platce = VALUES(reference_platce)
    """
    total_processed_files = 0
    total_rows_inserted = 0
    total_rows_skipped = 0
    # ============================================
    # Process the JSON files one by one
    # ============================================
    for p in all_json_paths:
        total_processed_files += 1
        print(f"--- Soubor {total_processed_files}/{len(all_json_paths)}: {p}", flush=True)
        data = load_json_file(p)
        if not data:
            continue
        # The JSON has the same shape as a live fetch from the Fio API.
        account_info = data.get("accountStatement", {}).get("info", {})
        account_id = account_info.get("accountId")
        if not account_id:
            print("  ⚠ Nelze zjistit cislo_uctu z JSON! Přeskakuji.")
            continue
        tlist = data.get("accountStatement", {}).get("transactionList", {}).get("transaction", [])
        # A single transaction arrives as a bare dict, not a one-element list.
        if isinstance(tlist, dict):
            tlist = [tlist]
        print(f"  Počet transakcí: {len(tlist)}", flush=True)
        if not tlist:
            continue
        rows = []
        skipped_local = 0
        # Map Fio ColumnN fields onto DB row dicts.
        for t in tlist:
            id_operace_val = safe_col(t, 22)
            # Column22 (operation id) can be missing; derive a stable fallback PK.
            if id_operace_val is None:
                id_operace_val = generate_fallback_id(account_id, t)
            transaction_date = clean_date(safe_col(t, 0))
            # A row without a date cannot be stored meaningfully — skip it.
            if not transaction_date:
                skipped_local += 1
                continue
            # NOTE(review): this script maps id_pokynu from Column19, while the
            # API importer uses Column17 — confirm which one is correct.
            id_pokynu_val = safe_col(t, 19)
            row = {
                "cislo_uctu": account_id,
                "id_operace": str(id_operace_val),
                "transaction_date": transaction_date,
                "amount": safe_col(t, 1),
                "currency": safe_col(t, 14),
                "protiucet": safe_col(t, 2),
                "kod_banky": safe_col(t, 3),
                "nazev_protiuctu": safe_col(t, 10),
                "nazev_banky": safe_col(t, 12),
                "api_bic": safe_col(t, 26),
                "typ": safe_col(t, 8),
                "provedl": safe_col(t, 9),
                "vs": safe_col(t, 5),
                "ks": safe_col(t, 4),
                "ss": safe_col(t, 6),
                "zprava_pro_prijemce": safe_col(t, 16),
                "uziv_identifikace": safe_col(t, 7),
                "komentar": safe_col(t, 25),
                "upr_objem_mena": safe_col(t, 18),
                "id_pokynu": str(id_pokynu_val) if id_pokynu_val else None,
                "reference_platce": safe_col(t, 27),
            }
            rows.append(row)
        total_rows_skipped += skipped_local
        print(f"  Přeskočeno transakcí bez data/PK: {skipped_local}")
        # Batched upserts; on a batch failure fall back to row-by-row inserts
        # so the single offending record can be identified and logged.
        inserted = 0
        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i: i + BATCH_SIZE]
            try:
                cur.executemany(sql, chunk)
                conn.commit()
                inserted += len(chunk)
            except Error as e:
                print(f"  ❌ Batch insert error: {e}")
                conn.rollback()
                if DEBUG_ON_ERROR:
                    print("  ► Per-row insert for debugging…")
                    for row in chunk:
                        try:
                            cur.execute(sql, row)
                            conn.commit()
                            inserted += 1
                        except Error as e_row:
                            conn.rollback()
                            print(f"    ✗ Chyba transakce id_operace={row['id_operace']}{e_row}")
        total_rows_inserted += inserted
        print(f"  ✓ Zapsáno/aktualizováno: {inserted}")
    # ======================
    # Summary
    # ======================
    cur.close()
    conn.close()
    elapsed = time.time() - start_all
    print("\n===== HOTOVO =====", flush=True)
    print(f"Souborů zpracováno: {total_processed_files}")
    print(f"Transakcí zapsáno/aktualizováno: {total_rows_inserted}")
    print(f"Transakcí přeskočeno: {total_rows_skipped}")
    print(f"Celkový čas: {elapsed:.2f} s")
    print("==================", flush=True)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,354 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import time
from datetime import date, timedelta
from pathlib import Path
import json
import requests
import mysql.connector
from mysql.connector import Error
from typing import Dict, Any, List
# ====================================================================
# A. PONECHÁNO: Vynucení UTF-8 pro správnou diakritiku v plánovaných úlohách
# ====================================================================
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
"""
FIO MULTIACCOUNT IMPORTER — KONEČNÁ VERZE S AUDITEM
============================================================
Přidána zpětná kontrola (audit) po každé dávce, která ověřuje,
že se ID transakcí skutečně zapsaly do databáze.
"""
# =========================================
# CONFIGURATION
# =========================================
# JSON file containing multiple account configs:
# [
#    { "name": "CZK rodina", "account_number": "2100046291", "token": "xxx" },
#    ...
# ]
ACCOUNTS_FILE = r"/accounts.json"
# Directory where raw JSON files from Fio API will be stored.
JSON_BASE_DIR = r"z:\Dropbox\!!!Days\Downloads Z230\Fio"
# MySQL connection parameters
DB = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "fio",
"charset": "utf8mb4",
}
# How many transactions insert per batch (performance tuning)
BATCH_SIZE = 500
# How many days back we load from Fio (default = last 90 days)
DAYS_BACK = 90
# =========================================
# HELPERS (Beze změny)
# =========================================
def load_accounts(path: str) -> List[Dict[str, str]]:
    """Load the account configurations from accounts.json and verify that
    every entry carries the required keys."""
    with open(path, "r", encoding="utf-8") as fh:
        accounts = json.load(fh)
    required = ("name", "account_number", "token")
    for acc in accounts:
        missing = [k for k in required if k not in acc]
        if missing:
            raise ValueError(f"Missing '{missing[0]}' in account config: {acc}")
    return accounts
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
    """Build the Fio REST 'periods' endpoint URL for one token and date range."""
    span = "/".join((d_from.strftime("%Y-%m-%d"), d_to.strftime("%Y-%m-%d")))
    return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{span}/transactions.json"
def fetch_fio_json(token: str, d_from: date, d_to: date) -> Any:
    """Download the transaction JSON for one token/period from the Fio API.

    Returns the decoded payload, or None on a non-200 response or a body
    that is not valid JSON (both are logged).
    """
    url = fio_url_for_period(token, d_from, d_to)
    resp = requests.get(url, timeout=30)
    if resp.status_code != 200:
        print(f"  ❌ HTTP {resp.status_code} from Fio: {url}", flush=True)
        return None
    try:
        payload = resp.json()
    except json.JSONDecodeError:
        print("  ❌ Cannot decode JSON from Fio response", flush=True)
        return None
    return payload
def safe_col(t: dict, n: int) -> Any:
    """Safely read the 'value' of Fio column N; None when the field is
    missing or empty."""
    entry = t.get(f"column{n}")
    return entry.get("value") if entry else None
def clean_date(dt_str: str) -> str:
    """Trim a Fio timestamp ("YYYY-MM-DD+HH:MM") down to its date part;
    None for empty input."""
    return str(dt_str)[:10] if dt_str else None
def ensure_dir(path: Path):
    """Create *path* (including parents) when missing; no-op when it exists."""
    path.mkdir(parents=True, exist_ok=True)
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date) -> Path:
    """Persist a raw Fio API payload to
    <base_dir>/<account_number>/<from>_to_<to>.json and return that path.

    Slashes in the account number are replaced with underscores so the
    number is usable as a directory name.
    """
    folder = account_cfg["account_number"].replace("/", "_")
    target_dir = Path(base_dir) / folder
    target_dir.mkdir(parents=True, exist_ok=True)
    filename = f"{d_from:%Y-%m-%d}_to_{d_to:%Y-%m-%d}.json"
    target = target_dir / filename
    with open(target, "w", encoding="utf-8") as fh:
        json.dump(data, fh, ensure_ascii=False, indent=2)
    return target
# =========================================
# MAIN IMPORT LOGIC
# =========================================
def main():
    """Import the last DAYS_BACK days of Fio transactions for all accounts.

    For every account in ACCOUNTS_FILE: download the period JSON from the
    Fio API, archive the raw payload under JSON_BASE_DIR, upsert the rows
    into MySQL in batches, and audit each batch by reading the written IDs
    back.  Side effects: network, disk and DB writes, console logging.
    Returns None.
    """
    start_all = time.time()
    # Time range: the last DAYS_BACK days up to today.
    today = date.today()
    d_from = today - timedelta(days=DAYS_BACK)
    d_to = today
    print("=== Fio multi-account import v2 (NOVÝ KONEKTOR + AUDIT) ===", flush=True)
    print(f"Období: {d_from}{d_to}", flush=True)
    # Load all accounts from accounts.json.
    try:
        accounts = load_accounts(ACCOUNTS_FILE)
    except Exception as e:
        print(f"FATÁLNÍ CHYBA při načítání účtů: {e}", flush=True)
        return
    print(f"  Účtů v konfiguraci: {len(accounts)}\n", flush=True)
    # Connect via mysql.connector.
    try:
        conn = mysql.connector.connect(
            host=DB["host"],
            port=DB["port"],
            user=DB["user"],
            password=DB["password"],
            database=DB["database"],
            charset=DB["charset"]
        )
        cur = conn.cursor()
    except Error as e:
        print(f"FATÁLNÍ CHYBA při připojení k DB: {e}", flush=True)
        return
    # Upsert keyed on id_operace; every other column is refreshed on conflict
    # so re-running the import is idempotent.
    sql = """
    INSERT INTO transactions
    (
        id_operace, cislo_uctu, transaction_date, amount, currency,
        protiucet, kod_banky, nazev_protiuctu, nazev_banky, typ,
        vs, ks, ss, uziv_identifikace, zprava_pro_prijemce,
        provedl, id_pokynu, komentar, upr_objem_mena, api_bic, reference_platce
    )
    VALUES
    (
        %(id_operace)s, %(cislo_uctu)s, %(transaction_date)s, %(amount)s, %(currency)s,
        %(protiucet)s, %(kod_banky)s, %(nazev_protiuctu)s, %(nazev_banky)s, %(typ)s,
        %(vs)s, %(ks)s, %(ss)s, %(uziv_identifikace)s, %(zprava_pro_prijemce)s,
        %(provedl)s, %(id_pokynu)s, %(komentar)s, %(upr_objem_mena)s, %(api_bic)s, %(reference_platce)s
    )
    ON DUPLICATE KEY UPDATE
        cislo_uctu = VALUES(cislo_uctu),
        transaction_date = VALUES(transaction_date),
        amount = VALUES(amount),
        currency = VALUES(currency),
        protiucet = VALUES(protiucet),
        kod_banky = VALUES(kod_banky),
        nazev_protiuctu = VALUES(nazev_protiuctu),
        nazev_banky = VALUES(nazev_banky),
        typ = VALUES(typ),
        vs = VALUES(vs),
        ks = VALUES(ks),
        ss = VALUES(ss),
        uziv_identifikace = VALUES(uziv_identifikace),
        zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
        provedl = VALUES(provedl),
        id_pokynu = VALUES(id_pokynu),
        komentar = VALUES(komentar),
        upr_objem_mena = VALUES(upr_objem_mena),
        api_bic = VALUES(api_bic),
        reference_platce = VALUES(reference_platce)
    """
    total_inserted = 0
    # ======================================================
    # Process each account in accounts.json
    # ======================================================
    for acc in accounts:
        name = acc["name"]
        cfg_acc_num = acc["account_number"]
        token = acc["token"]
        print(f"--- Účet: {name} ({cfg_acc_num}) ---", flush=True)
        t0 = time.time()
        # --- 1) Download JSON from the Fio API.
        data = fetch_fio_json(token, d_from, d_to)
        if data is None:
            print("  Přeskakuji, žádná data / chyba API.\n", flush=True)
            continue
        # --- 2) Archive the raw JSON to disk (failure here is non-fatal).
        try:
            json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
            print(f"  JSON uložen do: {json_path}", flush=True)
        except Exception as e:
            print(f"  ❌ Chyba při ukládání JSON souboru: {e}", flush=True)
        # --- 3) Extract the transactions from the JSON tree.
        tlist = data.get("accountStatement", {}).get("transactionList", {}).get("transaction", [])
        # A single transaction arrives as a bare dict, not a one-element list.
        if isinstance(tlist, dict):
            tlist = [tlist]
        print(f"  Počet transakcí v období: {len(tlist)}", flush=True)
        if not tlist:
            print("  Žádné transakce, jdu dál.\n", flush=True)
            continue
        fio_acc_id = data.get("accountStatement", {}).get("info", {}).get("accountId")
        # Sanity check: the account id reported by Fio should contain the
        # configured account number (prefix before any "/").
        if cfg_acc_num and fio_acc_id and cfg_acc_num.split("/")[0] not in fio_acc_id:
            print(f"  ⚠ Upozornění: accountId z Fio ({fio_acc_id}) "
                  f"se neshoduje s account_number v konfiguraci ({cfg_acc_num})", flush=True)
        # --- 4) Build the MySQL rows (ids coerced to str for the VARCHAR PK).
        rows = []
        for t in tlist:
            id_operace_val = safe_col(t, 22)
            id_pokynu_val = safe_col(t, 17)
            row = {
                "id_operace": str(id_operace_val) if id_operace_val is not None else None,
                "cislo_uctu": fio_acc_id,
                "transaction_date": clean_date(safe_col(t, 0)),
                "amount": safe_col(t, 1),
                "currency": safe_col(t, 14),
                "typ": safe_col(t, 8),
                "provedl": safe_col(t, 9),
                "protiucet": safe_col(t, 2),
                "kod_banky": safe_col(t, 3),
                "nazev_protiuctu": safe_col(t, 10),
                "nazev_banky": safe_col(t, 12),
                "api_bic": safe_col(t, 26),
                "vs": safe_col(t, 5),
                "ks": safe_col(t, 4),
                "ss": safe_col(t, 6),
                "zprava_pro_prijemce": safe_col(t, 16),
                "uziv_identifikace": safe_col(t, 7),
                "komentar": safe_col(t, 25),
                "upr_objem_mena": safe_col(t, 18),
                "id_pokynu": str(id_pokynu_val) if id_pokynu_val is not None else None,
                "reference_platce": safe_col(t, 27),
            }
            rows.append(row)
        # --- 5) Batched upserts with a read-back audit.
        inserted = 0
        actual_inserted_count = 0  # rows confirmed present by the audit query
        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i: i + BATCH_SIZE]
            # IDs present in this batch (rows lacking an id cannot be audited).
            chunk_ids = [row["id_operace"] for row in chunk if row["id_operace"] is not None]
            try:
                # 5.1: write the batch.
                cur.executemany(sql, chunk)
                conn.commit()
                # 5.2: audit — count how many of the batch IDs exist in the DB.
                if chunk_ids:
                    # Parameterized IN (...) list.  The previous version
                    # interpolated the IDs into the SQL text with an f-string,
                    # which breaks on quotes and is an injection hazard.
                    placeholders = ", ".join(["%s"] * len(chunk_ids))
                    cur.execute(
                        f"SELECT COUNT(*) FROM transactions WHERE id_operace IN ({placeholders})",
                        chunk_ids,
                    )
                    found_count = cur.fetchone()[0]
                    # Duplicate IDs within a batch collapse onto one DB row,
                    # so compare against the number of DISTINCT ids (the old
                    # comparison against len(chunk) produced false failures).
                    expected = len(set(chunk_ids))
                    if found_count != expected:
                        print(
                            f"  ⚠ AUDIT SELHAL: Zapsáno {expected}, ale v databázi nalezeno jen {found_count} pro tuto dávku!",
                            flush=True)
                    actual_inserted_count += found_count
                else:
                    # Batch had no auditable IDs; trust the committed write.
                    actual_inserted_count += len(chunk)
                inserted += len(chunk)
            except Error as e:
                print(f"  ❌ Chyba při zápisu do DB: {e}", flush=True)
                conn.rollback()
                break  # stop this account on a hard DB error
        elapsed = time.time() - t0
        total_inserted += actual_inserted_count  # audit-confirmed count only
        print(f"  ✓ Zapsáno (potvrzeno auditem): {actual_inserted_count} řádků do DB za {elapsed:.2f} s\n", flush=True)
    # Close DB.
    cur.close()
    conn.close()
    total_elapsed = time.time() - start_all
    print(f"=== Hotovo. Celkem zapsáno (potvrzeno auditem) {total_inserted} transakcí. "
          f"Celkový čas: {total_elapsed:.2f} s ===", flush=True)
# ======================================================
# ENTRY POINT
# ======================================================
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,222 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import time
import json
# ZMĚNA: Importujeme oficiální konektor
import mysql.connector
from mysql.connector import Error
from typing import Dict, Any, List
from pathlib import Path
from datetime import date
# ====================================================================
# A. PONECHÁNO: Vynucení UTF-8 pro správnou diakritiku
# ====================================================================
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
# =========================================
# KONFIGURACE PRO TEST
# =========================================
TEST_JSON_PATH = r"z:\Dropbox\!!!Days\Downloads Z230\Fio\2100074583\2025-09-06_to_2025-12-05.json"
TEST_ACCOUNT_ID = "2100074583"
# MySQL connection parameters
DB = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "fio",
"charset": "utf8", # mysql-connector používá 'utf8' nebo 'utf8mb4'
}
# =========================================
# POMOCNÉ FUNKCE (Z vašeho původního kódu)
# =========================================
def safe_col(t: dict, n: int) -> Any:
    """Return t["column<n>"]["value"], tolerating missing or empty fields."""
    field = t.get("column%d" % n)
    if field:
        return field.get("value")
    return None
def clean_date(dt_str: str) -> str:
    """Drop the timezone suffix from a Fio date string
    ("YYYY-MM-DD+HH:MM" -> "YYYY-MM-DD"); None for empty input."""
    if not dt_str:
        return None
    return str(dt_str)[0:10]
def load_test_data(path: str) -> Any:
    """Read the test fixture JSON from *path*; None (with an error message)
    on any failure."""
    try:
        with open(path, "r", encoding="utf-8") as fh:
            payload = json.load(fh)
    except Exception as e:
        print(f"FATÁLNÍ CHYBA: Nelze načíst JSON soubor z {path}: {e}", flush=True)
        return None
    return payload
# =========================================
# HLAVNÍ TESTOVACÍ LOGIKA
# =========================================
def run_test():
    """One-off integration test: load a saved Fio JSON export from disk,
    upsert its transactions into MySQL and verify the row count in the DB.

    Side effects: writes against the live `fio` database and prints a test
    summary.  Returns None.
    """
    start_all = time.time()
    print("=== TEST VKLÁDÁNÍ DAT Z JSON SOUBORU (OPRAVA S MYSQL-CONNECTOR) ===", flush=True)
    print(f"Zdroj dat: {TEST_JSON_PATH}", flush=True)
    # STEP 1: load the fixture JSON straight from disk.
    data = load_test_data(TEST_JSON_PATH)
    if data is None:
        return
    # STEP 2: extract the transaction list.
    tlist = data.get("accountStatement", {}).get("transactionList", {}).get("transaction", [])
    # A single transaction arrives as a bare dict, not a one-element list.
    if isinstance(tlist, dict):
        tlist = [tlist]
    fio_acc_id = data.get("accountStatement", {}).get("info", {}).get("accountId")
    expected_count = len(tlist)
    print(f"Očekávaný počet transakcí v JSON: {expected_count}", flush=True)
    if expected_count == 0:
        print("Test přeskočen, JSON je prázdný.", flush=True)
        return
    # STEP 3: connect to the DB — a plain cursor for writes and a
    # dictionary cursor for the verification query.
    try:
        conn = mysql.connector.connect(
            host=DB["host"],
            port=DB["port"],
            user=DB["user"],
            password=DB["password"],
            database=DB["database"],
            charset=DB["charset"]
        )
        cur = conn.cursor()
        cur_dict = conn.cursor(dictionary=True)
    except Error as e:
        print(f"FATÁLNÍ CHYBA: Nelze se připojit k DB: {e}", flush=True)
        return
    # STEP 4: build the upsert statement and the row dicts.  The upsert is
    # keyed on id_operace; every other column is refreshed on conflict.
    sql = """
    INSERT INTO transactions
    (id_operace, cislo_uctu, transaction_date, amount, currency, \
     protiucet, kod_banky, nazev_protiuctu, nazev_banky, typ, \
     vs, ks, ss, uziv_identifikace, zprava_pro_prijemce, \
     provedl, id_pokynu, komentar, upr_objem_mena, api_bic, reference_platce)
    VALUES (%(id_operace)s, %(cislo_uctu)s, %(transaction_date)s, %(amount)s, %(currency)s,
            %(protiucet)s, %(kod_banky)s, %(nazev_protiuctu)s, %(nazev_banky)s, %(typ)s,
            %(vs)s, %(ks)s, %(ss)s, %(uziv_identifikace)s, %(zprava_pro_prijemce)s,
            %(provedl)s, %(id_pokynu)s, %(komentar)s, %(upr_objem_mena)s, %(api_bic)s, \
            %(reference_platce)s) ON DUPLICATE KEY \
    UPDATE \
        cislo_uctu = \
    VALUES (cislo_uctu), transaction_date = \
    VALUES (transaction_date), amount = \
    VALUES (amount), currency = \
    VALUES (currency), protiucet = \
    VALUES (protiucet), kod_banky = \
    VALUES (kod_banky), nazev_protiuctu = \
    VALUES (nazev_protiuctu), nazev_banky = \
    VALUES (nazev_banky), typ = \
    VALUES (typ), vs = \
    VALUES (vs), ks = \
    VALUES (ks), ss = \
    VALUES (ss), uziv_identifikace = \
    VALUES (uziv_identifikace), zprava_pro_prijemce = \
    VALUES (zprava_pro_prijemce), provedl = \
    VALUES (provedl), id_pokynu = \
    VALUES (id_pokynu), komentar = \
    VALUES (komentar), upr_objem_mena = \
    VALUES (upr_objem_mena), api_bic = \
    VALUES (api_bic), reference_platce = \
    VALUES (reference_platce) \
    """
    rows = []
    for t in tlist:
        id_operace_val = safe_col(t, 22)
        id_pokynu_val = safe_col(t, 17)
        row = {
            # Key conversion: the primary key column is VARCHAR, so coerce
            # the numeric Fio id to str.
            "id_operace": str(id_operace_val) if id_operace_val is not None else None,
            "cislo_uctu": fio_acc_id,
            "transaction_date": clean_date(safe_col(t, 0)),
            "amount": safe_col(t, 1),
            "currency": safe_col(t, 14),
            "typ": safe_col(t, 8),
            "provedl": safe_col(t, 9),
            "protiucet": safe_col(t, 2),
            "kod_banky": safe_col(t, 3),
            "nazev_protiuctu": safe_col(t, 10),
            "nazev_banky": safe_col(t, 12),
            "api_bic": safe_col(t, 26),
            "vs": safe_col(t, 5),
            "ks": safe_col(t, 4),
            "ss": safe_col(t, 6),
            "zprava_pro_prijemce": safe_col(t, 16),
            "uziv_identifikace": safe_col(t, 7),
            "komentar": safe_col(t, 25),
            "upr_objem_mena": safe_col(t, 18),
            "id_pokynu": str(id_pokynu_val) if id_pokynu_val is not None else None,
            "reference_platce": safe_col(t, 27),
        }
        rows.append(row)
    # STEP 5: insert into the DB in one executemany call.
    inserted = 0
    try:
        cur.executemany(sql, rows)
        conn.commit()
        inserted = len(rows)
    except Error as e:
        print(f"  ❌ Chyba při VKLÁDÁNÍ do DB: {e}", flush=True)
        conn.rollback()
    # STEP 6: verify the result in the DB (uses the dictionary cursor).
    # NOTE(review): fio_acc_id is interpolated into the SQL text; acceptable
    # for a trusted local fixture, but a parameterized query would be safer.
    check_query = f"SELECT count(*) AS count FROM transactions WHERE cislo_uctu = '{fio_acc_id}'"
    cur_dict.execute(check_query)
    current_db_count = cur_dict.fetchone()['count']
    conn.close()
    print(f"\n--- SHRNUTÍ TESTU ---", flush=True)
    print(f"Očekávalo se vložení/aktualizace řádků: {expected_count}", flush=True)
    print(f"Počet řádků zpracovaných skriptem: {inserted}", flush=True)
    print(f"Aktuální počet záznamů pro účet {fio_acc_id} v DB: {current_db_count}", flush=True)
    print(f"Celkový čas: {time.time() - start_all:.2f} s", flush=True)
    if inserted == expected_count and current_db_count >= expected_count:
        print("✅ TEST ÚSPĚŠNÝ: Všechny transakce byly vloženy/aktualizovány, nebo DB obsahuje očekávaný počet.",
              flush=True)
    else:
        print("🔥 TEST SELHAL: Existuje nesoulad mezi očekávaným a skutečným počtem záznamů.", flush=True)
# ======================================================
# ENTRY POINT
# ======================================================
if __name__ == "__main__":
    run_test()

View File

@@ -0,0 +1,53 @@
[
{
"name": "EUR tatínek 1",
"account_number": "2100074583",
"token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
},
{
"name": "CZK rodina",
"account_number": "2100046291",
"token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
},
{
"name": "EUR TrialHelp",
"account_number": "2200787265",
"token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
},
{
"name": "CZK tatínek",
"account_number": "2400046293",
"token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
},
{
"name": "CHF tatínek",
"account_number": "2402161017",
"token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
},
{
"name": "EUR tatínek 2",
"account_number": "2500074582",
"token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
},
{
"name": "CZK TrialHelp",
"account_number": "2900046548",
"token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
},
{
"name": "CZK maminka svojě věci",
"account_number": "2003310572",
"token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
},
{
"name": "CZK na jídlo",
"account_number": "2403310563",
"token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
},
{
"name": "CZK ordinace",
"account_number": "2800046620",
"token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
}
]

View File

@@ -0,0 +1,40 @@
{
"tabs": [
{
"name": "All expenses",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND amount < 0 AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Vakciny Ptacek",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '220205630' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Poliklinika Prosek",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '1387720540' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Card payments",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet IS NULL AND typ = 'Platba kartou' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Vakciny Avenier",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '5050012811' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Socialka",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '1011-7926201' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "Zdravotka",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '1112001221' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "MEDIPOS",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '14309711' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
},
{
"name": "MEDEVIO",
"sql": "SELECT * FROM transactions WHERE cislo_uctu = '2800046620' AND protiucet = '2701907026' AND transaction_date >= '2024-09-30' ORDER BY transaction_date DESC;"
}
]
}

1
.env Normal file
View File

@@ -0,0 +1 @@
FIO_TOKEN="v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"

2
.idea/FIO.iml generated
View File

@@ -4,7 +4,7 @@
<content url="file://$MODULE_DIR$"> <content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.venv" /> <excludeFolder url="file://$MODULE_DIR$/.venv" />
</content> </content>
<orderEntry type="jdk" jdkName="Python 3.12 (FIO)" jdkType="Python SDK" /> <orderEntry type="jdk" jdkName="Python 3.13 (FIO)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
</module> </module>

1
.idea/misc.xml generated
View File

@@ -3,4 +3,5 @@
<component name="Black"> <component name="Black">
<option name="sdkName" value="Python 3.12 (FIO)" /> <option name="sdkName" value="Python 3.12 (FIO)" />
</component> </component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (FIO)" project-jdk-type="Python SDK" />
</project> </project>

4
.idea/vcs.xml generated
View File

@@ -1,4 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="VcsDirectoryMappings" defaultProject="true" /> <component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project> </project>

View File

205
010qrcodegenerator.py Normal file
View File

@@ -0,0 +1,205 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
============================================
QR Platba Generator Fully Documented Version
============================================
This script generates a valid Czech payment QR code (QR Platba)
according to the official ČBA (Czech Banking Association) specification v1.2.
The script creates a correct SPD (Short Payment Descriptor) string,
turns it into a QR code (PNG image), and saves it to disk.
✅ Works with all Czech banks (Fio, ČSOB, KB, Air Bank…)
✅ Supports both IBAN or domestic account format (account/bankcode)
✅ Automatically URL-encodes the message (so spaces and diacritics work)
✅ Can handle optional VS, SS, KS, due date, and instant-payment flag
Usage example (in your terminal):
---------------------------------
python generate_qrplatba.py \
--acc CZ7520100000002800046620 \
--amount 1543.50 \
--vs 7309208104 \
--ss 123456 \
--ks 0308 \
--date 09.12.2025 \
--msg "ahoj zlatíčko" \
--out qr_ok.png
"""
import argparse # For parsing command-line arguments
import urllib.parse # For safe URL-encoding of message text
from pathlib import Path # For easy file handling (cross-platform)
import qrcode # For generating the QR code image
from datetime import datetime # For date parsing and formatting
# --------------------------------------------------------------------
# 🧩 Helper function: Normalize date into the required YYYYMMDD format
# --------------------------------------------------------------------
def normalize_date(date_str: str) -> str:
    """
    Convert a user-supplied date string into the canonical YYYYMMDD form
    required by the QR Platba specification.

    Accepted inputs: "YYYYMMDD", "DD.MM.YYYY", "YYYY-MM-DD", "DDMMYYYY".

    Returns:
        str: the date as "YYYYMMDD", or "" when the input is None/empty.

    Raises:
        ValueError: when the string matches none of the supported formats.

    Fix: the previous version returned ANY 8-digit string unchanged, so a
    DDMMYYYY input such as "09122025" was never re-parsed (and garbage like
    "99999999" was accepted).  An 8-digit candidate is now validated as a
    real YYYYMMDD date first and falls through to the other formats
    otherwise.
    """
    if not date_str:
        return ""
    s = date_str.strip()
    # 8-digit input: accept it only if it is a *valid* YYYYMMDD date;
    # otherwise fall through so DDMMYYYY still gets a chance.
    if len(s) == 8 and s.isdigit():
        try:
            datetime.strptime(s, "%Y%m%d")
            return s
        except ValueError:
            pass
    # Try the remaining common formats.
    for fmt in ("%d.%m.%Y", "%Y-%m-%d", "%d%m%Y"):
        try:
            dt = datetime.strptime(s, fmt)
            return dt.strftime("%Y%m%d")
        except ValueError:
            continue
    # Nothing matched → raise an informative error.
    raise ValueError(
        "Date must be in one of formats: YYYYMMDD, DD.MM.YYYY, YYYY-MM-DD, DDMMYYYY"
    )
# --------------------------------------------------------------------
# 🔧 Helper function: Build the SPD string (core of QR Platba)
# --------------------------------------------------------------------
def build_spayd(params: dict) -> str:
    """
    Assemble a SPD (Short Payment Descriptor) string per the ČBA
    QR Platba v1.2 specification.

    Required key: params["acc"] (IBAN or domestic account string).
    Optional keys: amount, cc (default "CZK"), dt, x_vs, x_ss, x_ks,
    pt, msg.

    Returns:
        str: e.g. "SPD*1.0*ACC:CZ75...*AM:1543.50*CC:CZK*...*MSG:ahoj%20..."

    Raises:
        ValueError: when the account is missing.

    Fix: the MSG value was previously encoded with safe chars "$%*+-.:/",
    leaving '*' and '%' unescaped — but '*' is the SPAYD field separator and
    '%' its escape character, so a message containing either corrupted the
    descriptor.  They are now percent-encoded (%2A / %25).
    """
    # Fixed header.
    parts = ["SPD*1.0*"]

    # --- Required field: account number / IBAN ---
    acc = params.get("acc", "").strip()
    if not acc:
        raise ValueError("Missing required parameter: --acc")
    parts.append(f"ACC:{acc}*")

    # --- Optional: amount (spec requires exactly two decimals) ---
    amt = params.get("amount")
    if amt:
        parts.append(f"AM:{float(amt):.2f}*")

    # --- Optional: currency (defaults to CZK) ---
    cc = params.get("cc") or "CZK"
    parts.append(f"CC:{cc}*")

    # --- Optional: due date (already normalized to YYYYMMDD) ---
    dt = params.get("dt")
    if dt:
        parts.append(f"DT:{dt}*")

    # --- Optional: symbols (VS, SS, KS) ---
    for key in ("X-VS", "X-SS", "X-KS"):
        val = params.get(key.replace("-", "_").lower())
        if val:
            parts.append(f"{key}:{val}*")

    # --- Optional: payment type (PT:IP = instant payment) ---
    pt = params.get("pt")
    if pt:
        parts.append(f"PT:{pt}*")

    # --- Optional: message for recipient ---
    msg = params.get("msg") or ""
    if msg:
        # Percent-encode the value; '*' (field separator) and '%' (escape
        # character) must NOT be in the safe set or the SPD string breaks.
        safe_chars = "$+-.:/"
        encoded_msg = urllib.parse.quote(msg, safe=safe_chars)
        parts.append(f"MSG:{encoded_msg}")

    # Combine everything into one descriptor string.
    return "".join(parts)
# --------------------------------------------------------------------
# 🚀 Main program entry point
# --------------------------------------------------------------------
def main():
    """CLI entry point: parse arguments, build the SPD string, save a QR PNG."""
    parser = argparse.ArgumentParser(
        description="Generate a Czech QR Platba (payment QR code)."
    )
    parser.add_argument("--acc", required=True,
                        help='Account: either "2800046620/2010" or IBAN "CZ..."')
    parser.add_argument("--amount", help="Payment amount (e.g. 1543.50)")
    parser.add_argument("--vs", help="Variable symbol (X-VS)")
    parser.add_argument("--ss", help="Specific symbol (X-SS)")
    parser.add_argument("--ks", help="Constant symbol (X-KS)")
    parser.add_argument("--date", help="Due date (YYYYMMDD or DD.MM.YYYY etc.)")
    parser.add_argument("--msg", help="Message for recipient (will be URL-encoded)")
    parser.add_argument("--cc", default="CZK", help="Currency (default CZK)")
    parser.add_argument("--pt", help="Payment type (e.g. IP for instant payment)")
    parser.add_argument("--out", default="qrplatba.png", help="Output PNG file name")
    args = parser.parse_args()

    # Map CLI options onto the keys build_spayd() understands; the due
    # date is normalized to YYYYMMDD up front.
    params = {
        "acc": args.acc,
        "amount": args.amount,
        "cc": args.cc,
        "dt": normalize_date(args.date) if args.date else "",
        "x_vs": args.vs,
        "x_ss": args.ss,
        "x_ks": args.ks,
        "msg": args.msg,
        "pt": args.pt,
    }

    spayd = build_spayd(params)

    # Render the descriptor as a QR code image and write it to disk.
    target = Path(args.out)
    qrcode.make(spayd).save(target)

    # Echo the descriptor so the user can verify it independently.
    print("✅ QR Platba successfully generated")
    print("SPD string (you can verify at https://qr-platba.cz/test/):")
    print(spayd)
    print()
    print(f"📂 QR code image saved to: {target.resolve()}")


if __name__ == "__main__":
    main()

131
011 test.py Normal file
View File

@@ -0,0 +1,131 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import urllib.parse
import qrcode
from pathlib import Path
from datetime import datetime
from PIL import Image, ImageTk
import customtkinter as ctk
from tkinter import messagebox
IBAN = "CZ7520100000002800046620"
CURRENCY = "CZK"
OUTPUT_DIR = Path("QRPlatby")
OUTPUT_DIR.mkdir(exist_ok=True)
PRIJMENI = "Buzalka"
JMENO = "Vladimír"
RODCIS = "730928104"
ITEMS = {
"Očkování chřipka Vaxigrip": 600.00,
"Očkování chřipka Efluelda": 1300.00,
}
def create_spayd(iban, amount, vs, msg, currency="CZK"):
    """Build a ČBA QR Platba descriptor with account, amount, VS and message."""
    encoded = urllib.parse.quote(msg, safe="$%*+-.:/")
    fields = ["SPD*1.0", f"ACC:{iban}", f"AM:{amount:.2f}",
              f"CC:{currency}", f"X-VS:{vs}", f"MSG:{encoded}"]
    return "*".join(fields)
class QRPlatbaApp(ctk.CTk):
    """CustomTkinter window that generates QR Platba codes for the practice.

    Shows fixed patient details, lets the user pick a billable item from
    ITEMS, then renders and saves a QR payment code for that amount.
    """

    def __init__(self):
        """Build the whole UI, preselect the first item and center the window."""
        super().__init__()
        self.title("QR Platba Ordinace MUDr. Buzalková")
        self.geometry("520x520")
        self.minsize(480, 480)
        self.resizable(True, True)
        ctk.set_appearance_mode("light")
        ctk.set_default_color_theme("blue")
        # Outer container frame.
        frame = ctk.CTkFrame(self, corner_radius=10)
        frame.pack(expand=True, fill="both", padx=20, pady=20)
        ctk.CTkLabel(frame, text="Generátor QR Platby",
                     font=("Arial", 20, "bold")).pack(pady=(10, 20))
        # Static patient-detail panel (values come from module constants).
        patient = ctk.CTkFrame(frame, corner_radius=8)
        patient.pack(fill="x", pady=(0, 20), padx=10)
        for text in [f"Příjmení: {PRIJMENI}",
                     f"Jméno: {JMENO}",
                     f"Rodné číslo: {RODCIS}"]:
            ctk.CTkLabel(patient, text=text, font=("Arial", 12)).pack(anchor="w", padx=10, pady=2)
        # Payment selection panel.
        pay = ctk.CTkFrame(frame, corner_radius=8)
        pay.pack(fill="x", pady=(0, 20), padx=10)
        ctk.CTkLabel(pay, text="Vyberte položku k úhradě:",
                     font=("Arial", 12, "bold")).pack(anchor="w", padx=10, pady=(10, 5))
        # Display strings like "Očkování chřipka Vaxigrip (600 Kč)";
        # item_map translates a display string back to the raw ITEMS key.
        self.display_items = [f"{name} ({price:.0f} Kč)" for name, price in ITEMS.items()]
        self.item_map = {f"{name} ({price:.0f} Kč)": name for name, price in ITEMS.items()}
        self.selected_item = ctk.StringVar(value=self.display_items[0])
        self.combo = ctk.CTkOptionMenu(
            pay,
            variable=self.selected_item,
            values=self.display_items,
            font=("Arial", 12),
            command=self.update_amount
        )
        self.combo.pack(fill="x", padx=10)
        # Label echoing the price of the currently selected item.
        self.amount_label = ctk.CTkLabel(pay, text="", font=("Arial", 12, "italic"))
        self.amount_label.pack(anchor="e", padx=10, pady=(5, 10))
        self.update_amount()
        ctk.CTkButton(frame, text="Vytvořit QR Platbu",
                      font=("Arial", 13, "bold"),
                      height=40,
                      command=self.generate_qr).pack(pady=10)
        # Placeholder label; generate_qr() fills it with the QR image.
        self.qr_label = ctk.CTkLabel(frame, text="")
        self.qr_label.pack(pady=15)
        ctk.CTkLabel(frame,
                     text="© Ordinace MUDr. Buzalková | QR Platba dle ČBA v1.2",
                     font=("Arial", 10),
                     text_color="#666").pack(side="bottom", pady=(10, 0))
        # Center the window on the monitor once everything is laid out.
        self.center_window()

    def update_amount(self, _=None):
        """Refresh the price label to match the currently selected item."""
        display_item = self.selected_item.get()
        item = self.item_map[display_item]
        self.amount_label.configure(text=f"Částka: {ITEMS[item]:.2f}")

    def center_window(self):
        """Place the window horizontally centered, in the upper third vertically."""
        self.update_idletasks()  # ensure geometry info is up-to-date
        width = self.winfo_width()
        height = self.winfo_height()
        screen_width = self.winfo_screenwidth()
        screen_height = self.winfo_screenheight()
        x = int((screen_width / 2) - (width / 2))
        y = int((screen_height / 3) - (height / 2))
        self.geometry(f"{width}x{height}+{x}+{y}")

    def generate_qr(self):
        """Build the SPD string for the selected item, save a PNG, show it inline.

        The variable symbol is the patient's birth number (RODCIS); the file
        name is timestamped so successive exports never overwrite each other.
        """
        display_item = self.selected_item.get()
        item = self.item_map[display_item]
        spayd = create_spayd(IBAN, ITEMS[item], RODCIS, f"{PRIJMENI} {JMENO} {item}", CURRENCY)
        img = qrcode.make(spayd)
        filename = f"{PRIJMENI}_{JMENO}_{datetime.now():%Y%m%d_%H%M%S}.png"
        out_path = OUTPUT_DIR / filename
        img.save(out_path)
        # Show a scaled-down preview inside the window; keep a reference so
        # Tk does not garbage-collect the image.
        img_resized = img.resize((300, 300))
        qr_tk = ImageTk.PhotoImage(img_resized)
        self.qr_label.configure(image=qr_tk)
        self.qr_label.image = qr_tk
        # Grow the window to fit the newly shown QR preview.
        self.update_idletasks()
        self.geometry(f"{self.winfo_reqwidth()}x{self.winfo_reqheight()}")
        # messagebox.showinfo("QR Platba vytvořena",
        #                     f"Soubor uložen jako:\n{out_path}\n\nSPD řetězec:\n{spayd}")


if __name__ == "__main__":
    app = QRPlatbaApp()
    app.mainloop()

24
10 FioAPI.py Normal file
View File

@@ -0,0 +1,24 @@
import requests
import json
from datetime import date, timedelta
from dotenv import load_dotenv
import os

# Fetch the full transaction history from the Fio REST API and dump it to JSON.
load_dotenv()
API_TOKEN = os.getenv("FIO_TOKEN")

# NOTE: reaches back ~27 years (10000 days) to cover the account's entire
# history — the previous "Last 30 days" comment was wrong.
start = (date.today() - timedelta(days=10000)).strftime("%Y-%m-%d")
end = date.today().strftime("%Y-%m-%d")

url = f"https://fioapi.fio.cz/v1/rest/periods/{API_TOKEN}/{start}/{end}/transactions.json"
# timeout: fail fast instead of hanging forever on a stalled connection
response = requests.get(url, timeout=30)
print(response.status_code)
# Abort with a clear HTTP error before trying to parse a non-JSON error body.
response.raise_for_status()
data = response.json()

with open(r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json", "w", encoding="utf-8") as f:
    json.dump(data, f, ensure_ascii=False, indent=4)

171
20 ReadJSON.py Normal file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import pymysql
import time
# =========================================
# CONFIG
# =========================================
JSON_PATH = r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json"
DB = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "fio",
"charset": "utf8mb4",
}
BATCH_SIZE = 500 # how many rows per executemany()
# =========================================
# HELPERS
# =========================================
def col(t, n):
    """
    Return t['columnN']['value'], or None when the column is missing,
    the column entry itself is None/empty, or the value is absent.
    """
    entry = t.get(f"column{n}")
    return entry.get("value") if entry else None
def clean_date(dt):
    """
    Trim a Fio timestamp such as '2025-10-26+0200' to the plain ISO date
    (Fio spec: dates are always rrrr-mm-dd followed by a GMT offset).
    Returns None for empty/None input.
    """
    return dt[:10] if dt else None
# =========================================
# LOAD JSON
# =========================================
start_time = time.time()

# Load the previously downloaded Fio statement JSON from disk.
with open(JSON_PATH, "r", encoding="utf-8") as f:
    data = json.load(f)

transactions = data["accountStatement"]["transactionList"].get("transaction", [])
# Fio returns a bare dict instead of a list when the statement contains
# exactly one transaction — normalize to a list.
if isinstance(transactions, dict):
    transactions = [transactions]
print(f"Loaded {len(transactions)} transactions from JSON")

# The statement's own account number is attached to every row below.
account_number = data["accountStatement"]["info"]["accountId"]

# Map Fio "columnN" fields onto DB column names.
# NOTE(review): the column numbers below are the Fio API's numeric field
# ids, not positional indexes; id_pokynu is taken from column24 here while
# the multi-account importer's header mentions column19 — verify against
# the Fio API column specification.
rows = []
for t in transactions:
    row = {
        "datum": clean_date(col(t, 0)),      # transaction date (GMT suffix stripped)
        "objem": col(t, 1),                  # amount
        "mena": col(t, 14),                  # currency
        "cislo_uctu": account_number,        # our account, from the statement header
        "protiucet": col(t, 2),              # counterparty account
        "kod_banky": col(t, 3),              # counterparty bank code
        "ks": col(t, 4),                     # constant symbol
        "vs": col(t, 5),                     # variable symbol
        "ss": col(t, 6),                     # specific symbol
        "zprava": col(t, 16),                # message for recipient
        "poznamka": col(t, 25),              # note/comment
        "id_operace": col(t, 22),            # Fio operation id
        "id_pokynu": col(t, 24),             # order id — TODO confirm column number
        "ks1": col(t, 18),
        "nazev_banky": col(t, 15),
        "nazev_protiuctu": col(t, 10),
        "ss1": col(t, 19),
        "typ": col(t, 8),                    # transaction type
        "upr_objem": col(t, 20),             # clarified amount (FX)
        "upr_mena": col(t, 21),              # clarified currency (FX)
        "vs1": col(t, 17),
        "zadal": col(t, 12),                 # who entered the order
    }
    rows.append(row)
print(f"Prepared {len(rows)} rows for DB insert/update")
# =========================================
# MYSQL INSERT (BATCHED)
# =========================================
# Upsert all prepared rows into MySQL in batches. The connection and
# cursor are now closed in a finally block so a failing executemany()
# no longer leaks them.
conn = pymysql.connect(**DB)
cur = conn.cursor()

# Insert-or-update keyed on the table's unique key; every mutable field
# is refreshed from the incoming row on conflict.
sql = """
INSERT INTO transactions
(
    datum, objem, mena, cislo_uctu, protiucet, kod_banky,
    ks, vs, ss, zprava_pro_prijemce, poznamka,
    id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
    ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
)
VALUES
(
    %(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
    %(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
    %(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
    %(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
)
ON DUPLICATE KEY UPDATE
    datum = VALUES(datum),
    objem = VALUES(objem),
    mena = VALUES(mena),
    protiucet = VALUES(protiucet),
    kod_banky = VALUES(kod_banky),
    ks = VALUES(ks),
    vs = VALUES(vs),
    ss = VALUES(ss),
    zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
    poznamka = VALUES(poznamka),
    ks_1 = VALUES(ks_1),
    nazev_banky = VALUES(nazev_banky),
    nazev_protiuctu = VALUES(nazev_protiuctu),
    ss_1 = VALUES(ss_1),
    typ = VALUES(typ),
    upresneni_objem = VALUES(upresneni_objem),
    upresneni_mena = VALUES(upresneni_mena),
    vs_1 = VALUES(vs_1),
    zadal = VALUES(zadal)
"""

total = len(rows)
inserted = 0
try:
    # Batched executemany keeps round-trips low; commit per batch so a
    # late failure does not roll back earlier batches.
    for i in range(0, total, BATCH_SIZE):
        chunk = rows[i:i + BATCH_SIZE]
        cur.executemany(sql, chunk)
        conn.commit()
        inserted += len(chunk)
        # optional progress info:
        # print(f"Committed {inserted}/{total} rows")
finally:
    cur.close()
    conn.close()

elapsed = time.time() - start_time
print(f"✓ Imported {inserted} transactions into MySQL in {elapsed:.2f} seconds.")

View File

@@ -0,0 +1,5 @@
WHATSAPP_TOKEN=EAAhOTShYLw4BQEh6HTO8fHmLHtbEXhZBEB03wgEXx1lVrcJkNHQlqPXZAlysqXIqse15bfL5V0kjNTnJ91kcK0DGkgpNtlzLaHbSgOdXaYUu9DOmSZACGAtqAj8nkSJz0ZA32Qz2BYJggyTxfkjwlT7rzMtGtbA9HwOA9AjEKG6JiiozBJmZCZA0nGBSx9JlHZCVQZDZD
PHONE_NUMBER_ID=420775735276
WHATSAPP_PHONE_NUMBER_ID=930187756843231
WHATSAPP_RECIPIENT_NUMBER=420775735276
WHATSAPP_TEST_NUMBER=15551451876

View File

@@ -0,0 +1,320 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
"""
FIO EXPORT SCRIPT — FULLY COMMENTED VERSION
-------------------------------------------
This script connects to your MySQL "fio" database,
reads all transactions, and exports them into a highly formatted
Excel workbook.
Excel file includes:
• First sheet: "ALL" → contains ALL transactions
• Additional sheets: one for each account from accounts.json
• First 5 CZK sheets appear first in custom order
• All formatting exactly preserved (colors, borders, widths, formulas)
Everything is generated automatically.
"""
import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime
import os
import glob
import json
# ======================================================
# CONFIGURATION
# ======================================================
# MySQL server parameters
DB_HOST = "192.168.1.76"
DB_PORT = 3307
DB_USER = "root"
DB_PASS = "Vlado9674+"
DB_NAME = "fio"
# Where to save Excel files
OUTPUT_DIR = r"u:\Dropbox\!!!Days\Downloads Z230"
# JSON file with list of accounts (name + account_number)
ACCOUNTS_JSON = r"accounts.json"
# Columns that MUST be written as TEXT in Excel using ="value"
# to avoid Excel stripping zeros or changing formatting
TEXT_COLUMNS = ["cislo_uctu", "protiucet", "kod_banky", "ks", "vs", "ss"]
# ======================================================
# REMOVE OLD EXPORT FILES
# ======================================================
def cleanup_old_exports():
    """
    Delete previously exported XLSX files in OUTPUT_DIR that match the
    known FIO export name patterns, so only the newest export remains.

    Files that cannot be removed (locked or permission denied) are
    reported and skipped instead of being silently swallowed by a bare
    ``except`` as before.
    """
    patterns = [
        os.path.join(OUTPUT_DIR, "*FIO*transaction*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO*transactions*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO_transactions*.xlsx"),
    ]
    # The patterns overlap, so the same file can match more than once —
    # de-duplicate to avoid a second (failing) remove attempt.
    seen = set()
    for pattern in patterns:
        for file in glob.glob(pattern):
            if file in seen:
                continue
            seen.add(file)
            try:
                os.remove(file)
                print(f"🗑 Deleted old export: {file}")
            except OSError as e:
                # Locked / permission denied: report it and move on.
                print(f"⚠ Could not delete {file}: {e}")
# ======================================================
# CORE EXCEL FORMATTING FUNCTION
# ======================================================
def _column_letter(index: int) -> str:
    """Convert a 1-based column index to Excel letters (1 → 'A', 27 → 'AA')."""
    letters = ""
    while index > 0:
        index, rem = divmod(index - 1, 26)
        letters = chr(ord("A") + rem) + letters
    return letters


def format_sheet(ws, rows, headers):
    """
    Apply all formatting rules to a worksheet:
    - styles the (already written) header row
    - writes all data rows
    - converts selected columns to Excel text formulas
    - colors rows based on "objem" (red = negative, green = positive)
    - sets fixed column widths
    - adds borders to every cell
    - center-aligns the first 10 columns
    - freezes the header row and enables filtering
    """
    # -------------------------------
    # 1) Format HEADER row
    # -------------------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)  # bold text
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")  # yellow background
    # -------------------------------
    # 2) Write DATA rows
    # -------------------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]
            # Text-sensitive columns are written as ="value" so Excel keeps
            # leading zeros and does not coerce them to numbers.
            if h in TEXT_COLUMNS and val is not None:
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)
        ws.append(excel_row)
    # -------------------------------
    # 3) Background coloring by "objem"
    # -------------------------------
    fill_red = PatternFill(start_color="FFFFDDDD", end_color="FFFFDDDD", fill_type="solid")
    fill_green = PatternFill(start_color="FFEEFFEE", end_color="FFEEFFEE", fill_type="solid")
    objem_col_index = headers.index("objem") + 1
    for row_idx in range(2, len(rows) + 2):  # row 1 is the header
        cell_objem = ws.cell(row=row_idx, column=objem_col_index)
        try:
            value = float(cell_objem.value)
        except (TypeError, ValueError):
            # None or non-numeric content counts as zero (→ green).
            value = 0
        fill = fill_red if value < 0 else fill_green
        for col_idx in range(1, len(headers) + 1):
            ws.cell(row=row_idx, column=col_idx).fill = fill
    # -------------------------------
    # 4) Fixed column widths
    # -------------------------------
    fixed_widths = [
        6, 11, 11, 5, 14, 14, 8, 6, 13, 13,
        50, 53, 12, 12, 5, 49, 29, 5, 29, 16,
        15, 12, 49, 20
    ]
    for i, width in enumerate(fixed_widths, start=1):
        # _column_letter handles indexes past 26 correctly, unlike the
        # previous chr(64 + i) which produced garbage after column 'Z'.
        ws.column_dimensions[_column_letter(i)].width = width
    # -------------------------------
    # 5) Add borders + alignment
    # -------------------------------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")
    total_rows = len(rows) + 1
    total_cols = len(headers)
    for row_idx in range(1, total_rows + 1):
        for col_idx in range(1, total_cols + 1):
            cell = ws.cell(row=row_idx, column=col_idx)
            cell.border = border
            # Center-align ONLY the first 10 columns.
            if col_idx <= 10:
                cell.alignment = align_center
    # Freeze the header row so it stays visible while scrolling.
    ws.freeze_panes = "A2"
    # Enable auto filter on the top row.
    ws.auto_filter.ref = ws.dimensions
# ======================================================
# MAIN EXPORT PROCESS
# ======================================================
def export_fio():
    """
    Export all transactions from the MySQL "fio" database into one
    formatted Excel workbook: an "ALL" sheet first, then one sheet per
    account from accounts.json (five preferred CZK accounts first).
    """
    print("Connecting to MySQL...")
    try:
        conn = mysql.connector.connect(
            host=DB_HOST,
            port=DB_PORT,
            user=DB_USER,
            password=DB_PASS,
            database=DB_NAME
        )
    except Error as e:
        print("❌ Failed to connect:", e)
        return
    cur = conn.cursor(dictionary=True)
    # -------------------------------
    # Load accounts.json
    # -------------------------------
    with open(ACCOUNTS_JSON, "r", encoding="utf-8") as f:
        accounts = json.load(f)
    # -------------------------------
    # Sheets listed here come first, in this exact order.
    # -------------------------------
    preferred_order = [
        "CZK rodina",
        "CZK ordinace",
        "CZK na jídlo",
        "CZK TrialHelp",
        "CZK maminka svojě věci"
    ]
    accounts_sorted = []
    # Step 1: priority accounts first, in preferred order.
    for pref in preferred_order:
        for acc in accounts:
            if acc["name"] == pref:
                accounts_sorted.append(acc)
    # Step 2: remaining accounts in their config order.
    for acc in accounts:
        if acc not in accounts_sorted:
            accounts_sorted.append(acc)
    # -------------------------------
    # Create a new Excel workbook
    # -------------------------------
    wb = Workbook()
    wb.remove(wb.active)  # drop the default empty sheet
    # -------------------------------
    # FIRST SHEET: ALL TRANSACTIONS
    # -------------------------------
    cur.execute("SELECT * FROM transactions ORDER BY datum DESC")
    all_rows = cur.fetchall()
    if all_rows:
        headers = list(all_rows[0].keys())
        ws_all = wb.create_sheet(title="ALL")
        ws_all.append(headers)
        format_sheet(ws_all, all_rows, headers)
    # -------------------------------
    # INDIVIDUAL SHEETS PER ACCOUNT
    # -------------------------------
    for acc in accounts_sorted:
        acc_num = acc["account_number"]
        sheet_name = acc["name"][:31]  # Excel's 31-char sheet-name limit
        print(f"➡ Creating sheet: {sheet_name}")
        # Parameterized query — the account number is no longer
        # interpolated into the SQL string.
        cur.execute(
            "SELECT * FROM transactions WHERE cislo_uctu = %s ORDER BY datum DESC",
            (acc_num,),
        )
        rows = cur.fetchall()
        if not rows:
            print(f"⚠ No data for {sheet_name}")
            continue
        headers = list(rows[0].keys())
        ws = wb.create_sheet(title=sheet_name)
        ws.append(headers)
        format_sheet(ws, rows, headers)
    cur.close()
    conn.close()
    # Saving a workbook with zero sheets raises — guard the degenerate case.
    if not wb.sheetnames:
        print("⚠ No data found — nothing to export.")
        return
    # -------------------------------
    # Save Excel file (after removing older exports)
    # -------------------------------
    cleanup_old_exports()
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    filename = f"{timestamp} FIO transactions.xlsx"
    output_file = os.path.join(OUTPUT_DIR, filename)
    wb.save(output_file)
    print(f"✅ Export complete:\n{output_file}")


# ======================================================
# MAIN ENTRY POINT
# ======================================================
if __name__ == "__main__":
    export_fio()

View File

@@ -0,0 +1,5 @@
# Smoke test for Functions.py: fires a WhatsApp notification and prints
# the detected Dropbox root folder.
from Functions import SendWhatsAppMessage, get_dropbox_root

SendWhatsAppMessage("Ahoj Vlado, úloha dokončena!")
print(get_dropbox_root())

View File

@@ -0,0 +1,108 @@
# Function.py
import os
import time
import requests
from dotenv import load_dotenv
# Load .env variables once
load_dotenv()
WHATSAPP_TOKEN = os.getenv("WHATSAPP_TOKEN")
WHATSAPP_PHONE_ID = os.getenv("WHATSAPP_PHONE_NUMBER_ID")
WHATSAPP_RECIPIENT = os.getenv("WHATSAPP_RECIPIENT_NUMBER")
WAPI_URL = f"https://graph.facebook.com/v21.0/{WHATSAPP_PHONE_ID}/messages"
def SendWhatsAppMessage(message: str, retries: int = 3, delay: int = 2) -> bool:
    """
    Send a WhatsApp message via the WhatsApp Cloud API, retrying on failure.

    :param message: Text to send.
    :param retries: Number of attempts before giving up.
    :param delay: Seconds to wait between attempts.
    :return: True if the message was sent successfully, False otherwise.
    """
    # Safety check: refuse to run without complete .env configuration.
    if not WHATSAPP_TOKEN or not WHATSAPP_PHONE_ID or not WHATSAPP_RECIPIENT:
        print("❌ WhatsApp API configuration missing in .env")
        return False
    headers = {
        "Authorization": f"Bearer {WHATSAPP_TOKEN}",
        "Content-Type": "application/json"
    }
    payload = {
        "messaging_product": "whatsapp",
        "to": WHATSAPP_RECIPIENT,
        "type": "text",
        "text": {"body": message}
    }
    for attempt in range(1, retries + 1):
        try:
            response = requests.post(WAPI_URL, headers=headers, json=payload, timeout=15)
            status = response.status_code
            if status == 200:
                print(f"📨 WhatsApp message sent successfully (attempt {attempt})")
                return True
            print(f"⚠️ WhatsApp API error (attempt {attempt}): {status} {response.text}")
        except requests.RequestException as e:
            print(f"⚠️ Network error (attempt {attempt}): {e}")
        # Only sleep between attempts — the old code also slept pointlessly
        # after the final failed attempt.
        if attempt < retries:
            time.sleep(delay)
    print("❌ Failed to send WhatsApp message after retries.")
    return False
# -----------------------------------------------------
# Find Dropbox root by reading official info.json
# -----------------------------------------------------
def get_dropbox_root() -> str | None:
# """
# Returns the absolute Dropbox folder path by reading:
# C:\Users\<user>\AppData\Local\Dropbox\info.json
# This is 100% reliable even if Dropbox changes drive letter.
# """
import os
import json
localapp = os.environ.get("LOCALAPPDATA")
if not localapp:
print("⚠️ LOCALAPPDATA not found.")
return None
info_path = os.path.join(localapp, "Dropbox", "info.json")
if not os.path.exists(info_path):
print(f"⚠️ Dropbox info.json not found at: {info_path}")
return None
try:
with open(info_path, "r", encoding="utf-8") as f:
data = json.load(f)
# Most users: `personal`
if "personal" in data and "path" in data["personal"]:
return data["personal"]["path"]
# Business Dropbox if used
if "business" in data and "path" in data["business"]:
return data["business"]["path"]
print("⚠️ Dropbox info.json missing 'path' in personal/business")
return None
except Exception as e:
print(f"⚠️ Error reading Dropbox info.json: {e}")
return None

View File

@@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import subprocess
import time
from datetime import datetime
import sys
import io
# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPT1 = os.path.join(BASE_DIR, "21ReadJSONmultipleaccounts.py")
SCRIPT2 = os.path.join(BASE_DIR, "30Report.py")
LOG_DIR = os.path.join(BASE_DIR, "logs")
LOG_FILE = os.path.join(LOG_DIR, "FIOreport.log")
os.makedirs(LOG_DIR, exist_ok=True)
# Optional WhatsApp notify
try:
from Functions import SendWhatsAppMessage
WHATSAPP_AVAILABLE = True
except Exception:
WHATSAPP_AVAILABLE = False
def write_log(message):
    """Append one line to the run log file, then echo it to stdout."""
    with open(LOG_FILE, "a", encoding="utf-8") as handle:
        handle.write(f"{message}\n")
    print(message)
def run_script(path):
    """
    Run a child Python script with the current interpreter, copy its
    stdout/stderr into the log, and return True when it exited with 0.
    """
    write_log(f"\n[{datetime.now()}] ➡ Running: {os.path.basename(path)}")
    if not os.path.isfile(path):
        write_log(f"❌ Script not found: {path}")
        return False
    process = subprocess.Popen(
        [sys.executable, path],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
        encoding="utf-8",
        errors="replace"
    )
    # Drain both pipes together. The old code iterated stdout to EOF and
    # only then read stderr — with both channels PIPEd that can deadlock
    # once the child fills the stderr pipe buffer.
    stdout, stderr = process.communicate()
    for line in stdout.splitlines():
        write_log(line.rstrip())
    for line in stderr.splitlines():
        write_log("⚠️ " + line.rstrip())
    return process.returncode == 0
# ----------------------------------------------------------
# MAIN
# ----------------------------------------------------------
if __name__ == "__main__":
    # Two-stage pipeline: import from the Fio API, then build the report.
    # Stage 2 only runs if stage 1 succeeded.
    write_log("\n====================== NEW RUN ======================")
    ok1 = run_script(SCRIPT1)
    ok2 = False
    if ok1:
        write_log("✔ Stage 1 OK")
        time.sleep(1)  # brief pause between stages
        ok2 = run_script(SCRIPT2)
    # Notify via WhatsApp when available; always record the outcome.
    if ok1 and ok2:
        write_log("✔ All stages completed successfully")
        if WHATSAPP_AVAILABLE:
            SendWhatsAppMessage("✔ FIO import + report hotový.")
    else:
        write_log("❌ SOME PART FAILED — check above for errors")
        if WHATSAPP_AVAILABLE:
            SendWhatsAppMessage("❌ FIO proces selhal. Zkontroluj log.")
    write_log("======================== END ========================\n")

View File

@@ -0,0 +1,28 @@
import os
import requests
from dotenv import load_dotenv
load_dotenv()
TOKEN = os.getenv("WHATSAPP_TOKEN")
PHONE_NUMBER_ID = os.getenv("WHATSAPP_PHONE_NUMBER_ID")
RECIPIENT = os.getenv("WHATSAPP_RECIPIENT_NUMBER")
def send_whatsapp_message(text):
    """Send a plain-text WhatsApp message via the Cloud API and print the result."""
    url = f"https://graph.facebook.com/v22.0/{PHONE_NUMBER_ID}/messages"
    headers = {
        "Authorization": f"Bearer {TOKEN}",
        "Content-Type": "application/json"
    }
    data = {
        "messaging_product": "whatsapp",
        "to": RECIPIENT,
        "type": "text",
        "text": {"body": text}
    }
    # timeout added so a stalled API call cannot hang the script forever
    r = requests.post(url, headers=headers, json=data, timeout=15)
    print("Status:", r.status_code)
    print("Response:", r.text)


send_whatsapp_message("Ahoj Vlado! Test zpráva přes WhatsApp API 🔔")

View File

@@ -0,0 +1,53 @@
[
{
"name": "EUR tatínek 1",
"account_number": "2100074583",
"token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
},
{
"name": "CZK rodina",
"account_number": "2100046291",
"token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
},
{
"name": "EUR TrialHelp",
"account_number": "2200787265",
"token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
},
{
"name": "CZK tatínek",
"account_number": "2400046293",
"token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
},
{
"name": "CHF tatínek",
"account_number": "2402161017",
"token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
},
{
"name": "EUR tatínek 2",
"account_number": "2500074582",
"token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
},
{
"name": "CZK TrialHelp",
"account_number": "2900046548",
"token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
},
{
"name": "CZK maminka svojě věci",
"account_number": "2003310572",
"token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
},
{
"name": "CZK na jídlo",
"account_number": "2403310563",
"token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
},
{
"name": "CZK ordinace",
"account_number": "2800046620",
"token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
}
]

View File

@@ -0,0 +1,164 @@
====================== NEW RUN ======================
[2025-11-30 19:25:09.332782] ➡ Running: 21ReadJSONmultipleaccounts.py
=== Fio multi-account import ===
Období: 2025-09-01 až 2025-11-30
Načítám účty z JSON konfigurace...
Účtů v konfiguraci: 10
--- Účet: EUR tatínek 1 (2100074583) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100074583\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 2
✓ Zapsáno (insert/update): 2 řádků do DB za 0.27 s
--- Účet: CZK rodina (2100046291) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100046291\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 307
✓ Zapsáno (insert/update): 307 řádků do DB za 0.37 s
--- Účet: EUR TrialHelp (2200787265) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2200787265\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 2
✓ Zapsáno (insert/update): 2 řádků do DB za 0.22 s
--- Účet: CZK tatínek (2400046293) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2400046293\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: CHF tatínek (2402161017) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2402161017\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: EUR tatínek 2 (2500074582) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2500074582\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: CZK TrialHelp (2900046548) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2900046548\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 6
✓ Zapsáno (insert/update): 6 řádků do DB za 0.23 s
--- Účet: CZK maminka svojě věci (2003310572) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2003310572\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 75
✓ Zapsáno (insert/update): 75 řádků do DB za 0.27 s
--- Účet: CZK na jídlo (2403310563) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2403310563\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 119
✓ Zapsáno (insert/update): 119 řádků do DB za 0.38 s
--- Účet: CZK ordinace (2800046620) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2800046620\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 126
✓ Zapsáno (insert/update): 126 řádků do DB za 0.31 s
=== Hotovo. Celkem zapsáno 637 transakcí. Celkový čas: 2.94 s ===
⚠️ Could not find platform independent libraries <prefix>
✔ Stage 1 OK
[2025-11-30 19:25:13.462554] ➡ Running: 30Report.py
Connecting to MySQL...
➡ Creating sheet: CZK rodina
➡ Creating sheet: CZK ordinace
➡ Creating sheet: CZK na jídlo
➡ Creating sheet: CZK TrialHelp
➡ Creating sheet: CZK maminka svojě věci
➡ Creating sheet: EUR tatínek 1
➡ Creating sheet: EUR TrialHelp
➡ Creating sheet: CZK tatínek
➡ Creating sheet: CHF tatínek
➡ Creating sheet: EUR tatínek 2
🗑 Deleted old export: Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-22-48 FIO transactions.xlsx
✅ Export complete:
Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-25-54 FIO transactions.xlsx
⚠️ Could not find platform independent libraries <prefix>
✔ All stages completed successfully
======================== END ========================
====================== NEW RUN ======================
[2025-11-30 19:30:19.846254] ➡ Running: 21ReadJSONmultipleaccounts.py
=== Fio multi-account import ===
Období: 2025-09-01 až 2025-11-30
Načítám účty z JSON konfigurace...
Účtů v konfiguraci: 10
--- Účet: EUR tatínek 1 (2100074583) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100074583\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 2
✓ Zapsáno (insert/update): 2 řádků do DB za 0.26 s
--- Účet: CZK rodina (2100046291) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100046291\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 307
✓ Zapsáno (insert/update): 307 řádků do DB za 0.37 s
--- Účet: EUR TrialHelp (2200787265) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2200787265\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 2
✓ Zapsáno (insert/update): 2 řádků do DB za 0.23 s
--- Účet: CZK tatínek (2400046293) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2400046293\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: CHF tatínek (2402161017) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2402161017\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: EUR tatínek 2 (2500074582) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2500074582\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 0
Žádné transakce, jdu dál.
--- Účet: CZK TrialHelp (2900046548) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2900046548\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 6
✓ Zapsáno (insert/update): 6 řádků do DB za 0.31 s
--- Účet: CZK maminka svojě věci (2003310572) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2003310572\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 75
✓ Zapsáno (insert/update): 75 řádků do DB za 0.37 s
--- Účet: CZK na jídlo (2403310563) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2403310563\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 119
✓ Zapsáno (insert/update): 119 řádků do DB za 0.30 s
--- Účet: CZK ordinace (2800046620) ---
JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2800046620\2025-09-01_to_2025-11-30.json
Počet transakcí v období: 126
✓ Zapsáno (insert/update): 126 řádků do DB za 0.30 s
=== Hotovo. Celkem zapsáno 637 transakcí. Celkový čas: 2.81 s ===
⚠️ Could not find platform independent libraries <prefix>
✔ Stage 1 OK
[2025-11-30 19:30:23.822641] ➡ Running: 30Report.py
Connecting to MySQL...
➡ Creating sheet: CZK rodina
➡ Creating sheet: CZK ordinace
➡ Creating sheet: CZK na jídlo
➡ Creating sheet: CZK TrialHelp
➡ Creating sheet: CZK maminka svojě věci
➡ Creating sheet: EUR tatínek 1
➡ Creating sheet: EUR TrialHelp
➡ Creating sheet: CZK tatínek
➡ Creating sheet: CHF tatínek
➡ Creating sheet: EUR tatínek 2
🗑 Deleted old export: Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-25-54 FIO transactions.xlsx
✅ Export complete:
Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-31-02 FIO transactions.xlsx
⚠️ Could not find platform independent libraries <prefix>
✔ All stages completed successfully
======================== END ========================

View File

@@ -0,0 +1,400 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import io
import time
from datetime import date, timedelta
from pathlib import Path
import json
import requests
import mysql.connector
from mysql.connector import Error
from typing import Dict, Any, List
import hashlib
# ====================================================================
# A. Force UTF-8 on stdout/stderr so Czech diacritics print correctly
#    when the script runs as a scheduled task (non-UTF-8 console).
# ====================================================================
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
# NOTE(review): the triple-quoted literal below is NOT a real module
# docstring — executable statements precede it, so it is a no-op expression.
# Moving it above the imports would make it the module docstring.
"""
FIO MULTIACCOUNT IMPORTER — VERZE S ROBUSTNĚJŠÍM HANDLINGEM PK
===============================================================
- mysql.connector (Oracle) pro stabilní manipulaci s datovými typy
- Bezpečné generování id_operace, pokud chybí Column22
- Správné mapování id_pokynu = Column19
- Detailní logování chybných řádků
"""
# =========================================
# CONFIGURATION
# =========================================
ACCOUNTS_FILE = r"accounts.json"  # list of {name, account_number, token}
JSON_BASE_DIR = r"z:\Dropbox\!!!Days\Downloads Z230\Fio"  # raw JSON archive root
# NOTE(review): DB credentials are hard-coded and committed to the repo —
# consider loading them from an environment file kept out of version control.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}
BATCH_SIZE = 500   # rows per executemany() batch
DAYS_BACK = 90     # import window: today - 90 days .. today
# Enables fallback per-row inserts with detailed logging when a batch fails
DEBUG_ON_ERROR = True
# =========================================
# HELPERS
# =========================================
def load_accounts(path: str) -> List[Dict[str, str]]:
    """Load the account list from *path* and verify every entry is complete.

    Raises ValueError for the first account entry missing a required key.
    """
    with open(path, "r", encoding="utf-8") as handle:
        entries = json.load(handle)
    required = ("name", "account_number", "token")
    for entry in entries:
        missing = [k for k in required if k not in entry]
        if missing:
            raise ValueError(f"Missing '{missing[0]}' in account config: {entry}")
    return entries
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
    """Build the Fio REST "periods" endpoint URL for the inclusive date range."""
    return (
        "https://fioapi.fio.cz/v1/rest/periods/"
        f"{token}/{d_from:%Y-%m-%d}/{d_to:%Y-%m-%d}/transactions.json"
    )
def fetch_fio_json(token: str, d_from: date, d_to: date) -> Any:
    """Download the transaction JSON for one account.

    Returns the parsed payload, or None on a non-200 status or a body
    that is not valid JSON (both are logged, not raised).
    """
    url = fio_url_for_period(token, d_from, d_to)
    response = requests.get(url, timeout=30)
    if response.status_code != 200:
        print(f" ❌ HTTP {response.status_code} from Fio: {url}", flush=True)
        return None
    try:
        return response.json()
    except json.JSONDecodeError:
        print(" ❌ Cannot decode JSON from Fio response", flush=True)
        return None
def safe_col(t: dict, n: int) -> Any:
    """Return the "value" field of Fio ``columnN``, or None when absent/empty."""
    cell = t.get(f"column{n}")
    return cell.get("value") if cell else None
def clean_date(dt_str: str) -> "str | None":
    """Strip the timezone suffix from a Fio date ("YYYY-MM-DD+HH:MM" -> "YYYY-MM-DD").

    Returns None for empty/None input — the original annotation claimed
    ``-> str`` but this path has always returned None; the annotation is
    the only change here.
    """
    if not dt_str:
        return None
    return str(dt_str)[:10]
def ensure_dir(path: Path) -> None:
    """Create *path* (and any missing parents); no-op when it already exists."""
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date) -> Path:
    """Write the raw Fio JSON under ``<base_dir>/<account_number>/`` and return its path.

    A '/' in the account number (prefix form) is replaced by '_' so it is a
    valid folder name.
    """
    folder = Path(base_dir) / account_cfg["account_number"].replace("/", "_")
    ensure_dir(folder)
    out_path = folder / f"{d_from:%Y-%m-%d}_to_{d_to:%Y-%m-%d}.json"
    with open(out_path, "w", encoding="utf-8") as handle:
        json.dump(data, handle, ensure_ascii=False, indent=2)
    return out_path
def generate_fallback_id(fio_acc_id: str, t: dict) -> str:
    """Deterministic 20-character surrogate key for rows missing Column22.

    SHA-1 over a few stable columns, truncated to fit the VARCHAR(20)
    primary key. Same transaction always hashes to the same id.
    """
    def _value(n):
        # Inlined equivalent of safe_col(): value of columnN or None.
        cell = t.get(f"column{n}")
        return cell.get("value") if cell else None

    raw = _value(0)
    raw_date = str(raw)[:10] if raw else ""
    parts = (
        fio_acc_id,
        raw_date,
        str(_value(1) or ""),
        str(_value(2) or ""),
        str(_value(5) or ""),
    )
    return hashlib.sha1("|".join(parts).encode("utf-8")).hexdigest()[:20]
# =========================================
# MAIN IMPORT LOGIC
# =========================================
def main():
    """Import the last DAYS_BACK days of transactions for every configured
    account into the MySQL ``transactions`` table (upsert keyed on id_operace).

    Per account: download JSON from Fio, archive it to disk, map the Fio
    columns to DB columns, then batch-upsert; on a batch failure fall back
    to per-row inserts so the offending row can be logged.
    """
    start_all = time.time()
    today = date.today()
    d_from = today - timedelta(days=DAYS_BACK)
    d_to = today
    print("=== Fio multi-account import v3 (PK fix, lepší logování) ===", flush=True)
    print(f"Období: {d_from} – {d_to}", flush=True)
    # Load all accounts from accounts.json
    try:
        accounts = load_accounts(ACCOUNTS_FILE)
    except Exception as e:
        print(f"FATÁLNÍ CHYBA při načítání účtů: {e}", flush=True)
        return
    print(f" Účtů v konfiguraci: {len(accounts)}\n", flush=True)
    # Connect to the database
    try:
        conn = mysql.connector.connect(
            host=DB["host"],
            port=DB["port"],
            user=DB["user"],
            password=DB["password"],
            database=DB["database"],
            charset=DB["charset"],
        )
        cur = conn.cursor()
    except Error as e:
        print(f"FATÁLNÍ CHYBA při připojení k DB: {e}", flush=True)
        return
    # Upsert statement matching the current table structure
    sql = """
    INSERT INTO transactions
    (
        id_operace, cislo_uctu, transaction_date, amount, currency,
        protiucet, kod_banky, nazev_protiuctu, nazev_banky, typ,
        vs, ks, ss, uziv_identifikace, zprava_pro_prijemce,
        provedl, id_pokynu, komentar, upr_objem_mena, api_bic, reference_platce
    )
    VALUES
    (
        %(id_operace)s, %(cislo_uctu)s, %(transaction_date)s, %(amount)s, %(currency)s,
        %(protiucet)s, %(kod_banky)s, %(nazev_protiuctu)s, %(nazev_banky)s, %(typ)s,
        %(vs)s, %(ks)s, %(ss)s, %(uziv_identifikace)s, %(zprava_pro_prijemce)s,
        %(provedl)s, %(id_pokynu)s, %(komentar)s, %(upr_objem_mena)s, %(api_bic)s, %(reference_platce)s
    )
    ON DUPLICATE KEY UPDATE
        cislo_uctu = VALUES(cislo_uctu),
        transaction_date = VALUES(transaction_date),
        amount = VALUES(amount),
        currency = VALUES(currency),
        protiucet = VALUES(protiucet),
        kod_banky = VALUES(kod_banky),
        nazev_protiuctu = VALUES(nazev_protiuctu),
        nazev_banky = VALUES(nazev_banky),
        typ = VALUES(typ),
        vs = VALUES(vs),
        ks = VALUES(ks),
        ss = VALUES(ss),
        uziv_identifikace = VALUES(uziv_identifikace),
        zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
        provedl = VALUES(provedl),
        id_pokynu = VALUES(id_pokynu),
        komentar = VALUES(komentar),
        upr_objem_mena = VALUES(upr_objem_mena),
        api_bic = VALUES(api_bic),
        reference_platce = VALUES(reference_platce)
    """
    total_inserted = 0
    total_skipped_pk = 0
    total_skipped_error = 0
    # ======================================================
    # PROCESS EACH ACCOUNT IN accounts.json
    # ======================================================
    for acc in accounts:
        name = acc["name"]
        cfg_acc_num = acc["account_number"]
        token = acc["token"]
        print(f"--- Účet: {name} ({cfg_acc_num}) ---", flush=True)
        t0 = time.time()
        # 1) Download JSON from Fio API
        data = fetch_fio_json(token, d_from, d_to)
        if data is None:
            print(" Přeskakuji, žádná data / chyba API.\n", flush=True)
            continue
        # 2) Save raw JSON file to disk (archival only — a failure here
        #    does not stop the DB import)
        try:
            json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
            print(f" JSON uložen do: {json_path}", flush=True)
        except Exception as e:
            print(f" ❌ Chyba při ukládání JSON souboru: {e}", flush=True)
        # 3) Extract transactions from JSON tree; a single transaction is
        #    delivered as a dict, not a 1-element list — normalize it
        tlist = data.get("accountStatement", {}).get("transactionList", {}).get("transaction", [])
        if isinstance(tlist, dict):
            tlist = [tlist]
        print(f" Počet transakcí v období: {len(tlist)}", flush=True)
        if not tlist:
            print(" Žádné transakce, jdu dál.\n", flush=True)
            continue
        fio_acc_id = data.get("accountStatement", {}).get("info", {}).get("accountId")
        # Warn (but continue) when the API's accountId does not contain the
        # configured account number prefix
        if cfg_acc_num and fio_acc_id and cfg_acc_num.split("/")[0] not in fio_acc_id:
            print(
                f" ⚠ Upozornění: accountId z Fio ({fio_acc_id}) "
                f"se neshoduje s account_number v konfiguraci ({cfg_acc_num})",
                flush=True,
            )
        # 4) Build list of MySQL rows
        rows = []
        skipped_pk_account = 0
        for t in tlist:
            # id_operace = Column22 (Fio "movement id")
            id_operace_val = safe_col(t, 22)
            # If missing, generate a stable fallback hash that fits VARCHAR(20)
            if id_operace_val is None:
                fallback = generate_fallback_id(fio_acc_id or "", t)
                id_operace_val = fallback
                # Uncomment to log where the fallback id is being used:
                # print(f" ⚠ Fallback id_operace (hash) pro transakci: {fallback}", flush=True)
            # NOTE(review): this branch is unreachable — the fallback above
            # always assigns a value; kept only as a belt-and-braces guard
            if id_operace_val is None:
                skipped_pk_account += 1
                continue
            transaction_date = clean_date(safe_col(t, 0))
            if not transaction_date:
                # The insert would fail anyway (NOT NULL column), so skip
                if DEBUG_ON_ERROR:
                    print(f" ⚠ Přeskakuji transakci bez data, id_operace={id_operace_val}", flush=True)
                skipped_pk_account += 1
                continue
            id_pokynu_val = safe_col(t, 19)  # Fio "instruction id" = Column19
            row = {
                "id_operace": str(id_operace_val),
                "cislo_uctu": fio_acc_id,
                "transaction_date": transaction_date,
                "amount": safe_col(t, 1),
                "currency": safe_col(t, 14),
                "typ": safe_col(t, 8),
                "provedl": safe_col(t, 9),
                "protiucet": safe_col(t, 2),
                "kod_banky": safe_col(t, 3),
                "nazev_protiuctu": safe_col(t, 10),
                "nazev_banky": safe_col(t, 12),
                "api_bic": safe_col(t, 26),
                "vs": safe_col(t, 5),
                "ks": safe_col(t, 4),
                "ss": safe_col(t, 6),
                "zprava_pro_prijemce": safe_col(t, 16),
                "uziv_identifikace": safe_col(t, 7),
                "komentar": safe_col(t, 25),
                "upr_objem_mena": safe_col(t, 18),
                "id_pokynu": str(id_pokynu_val) if id_pokynu_val is not None else None,
                "reference_platce": safe_col(t, 27),
            }
            rows.append(row)
        if skipped_pk_account:
            print(f" ⚠ Přeskočeno {skipped_pk_account} transakcí kvůli chybějícímu/invalidnímu PK nebo datu.", flush=True)
            total_skipped_pk += skipped_pk_account
        # 5) INSERT rows into MySQL in batches; on a batch failure retry
        #    row-by-row so the exact offending row gets logged
        inserted = 0
        skipped_error_account = 0
        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i: i + BATCH_SIZE]
            try:
                cur.executemany(sql, chunk)
                conn.commit()
                inserted += len(chunk)
            except Error as e:
                print(f" ❌ Chyba při zápisu batch do DB: {e}", flush=True)
                conn.rollback()
                if DEBUG_ON_ERROR:
                    print(" ► Přecházím na per-row insert pro detail chyb...", flush=True)
                    for row in chunk:
                        try:
                            cur.execute(sql, row)
                            conn.commit()
                            inserted += 1
                        except Error as e_row:
                            skipped_error_account += 1
                            conn.rollback()
                            print(
                                f" ✗ Chybná transakce id_operace={row.get('id_operace')} "
                                f"datum={row.get('transaction_date')} částka={row.get('amount')} "
                                f"{e_row}",
                                flush=True,
                            )
        elapsed = time.time() - t0
        total_inserted += inserted
        total_skipped_error += skipped_error_account
        print(
            f" ✓ Zapsáno (insert/update): {inserted} řádků do DB "
            f"(přeskočeno chybějící PK/dat {skipped_pk_account}, chybou insertu {skipped_error_account}) "
            f"za {elapsed:.2f} s\n",
            flush=True,
        )
    # Close DB
    cur.close()
    conn.close()
    total_elapsed = time.time() - start_all
    print(
        f"=== Hotovo. Celkem zapsáno {total_inserted} transakcí. "
        f"Přeskočeno kvůli PK/datům: {total_skipped_pk}, kvůli chybě insertu: {total_skipped_error}. "
        f"Celkový čas: {total_elapsed:.2f} s ===",
        flush=True,
    )
# ======================================================
# ENTRY POINT
# ======================================================
if __name__ == "__main__":
    main()  # run the import only when executed as a script, not on import

View File

@@ -0,0 +1,53 @@
[
{
"name": "EUR tatínek 1",
"account_number": "2100074583",
"token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
},
{
"name": "CZK rodina",
"account_number": "2100046291",
"token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
},
{
"name": "EUR TrialHelp",
"account_number": "2200787265",
"token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
},
{
"name": "CZK tatínek",
"account_number": "2400046293",
"token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
},
{
"name": "CHF tatínek",
"account_number": "2402161017",
"token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
},
{
"name": "EUR tatínek 2",
"account_number": "2500074582",
"token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
},
{
"name": "CZK TrialHelp",
"account_number": "2900046548",
"token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
},
{
"name": "CZK maminka svojě věci",
"account_number": "2003310572",
"token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
},
{
"name": "CZK na jídlo",
"account_number": "2403310563",
"token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
},
{
"name": "CZK ordinace",
"account_number": "2800046620",
"token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
}
]

View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import json
import time
from datetime import date, timedelta
from pathlib import Path
import requests
import pymysql
# =========================================
# CONFIG
# =========================================
ACCOUNTS_FILE = r"u:\PycharmProjects\FIO\accounts.json"  # same format as the daily importer
JSON_BASE_DIR = r"u:\Dropbox\!!!Days\Downloads Z230\Fio"  # archive root for raw JSON
# NOTE(review): credentials hard-coded and committed — same concern as in
# the daily importer; consider an external env file.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}
BATCH_SIZE = 500  # rows per executemany() batch
# =========================================
# HELPERS
# =========================================
def load_accounts(path: str):
    """Read accounts.json and check that every entry carries the required keys."""
    with open(path, "r", encoding="utf-8") as handle:
        entries = json.load(handle)
    required = ("name", "account_number", "token")
    for entry in entries:
        for key in required:
            if key not in entry:
                raise ValueError(f"Missing key '{key}' in {entry}")
    return entries
def fio_period_url(token: str, d_from: date, d_to: date):
    """URL of the Fio "periods" endpoint for the inclusive date range."""
    base = "https://fioapi.fio.cz/v1/rest/periods"
    return f"{base}/{token}/{d_from:%Y-%m-%d}/{d_to:%Y-%m-%d}/transactions.json"
def fetch_fio_json(token: str, d_from: date, d_to: date):
    """Download the period JSON; return None on non-200 status or bad JSON."""
    url = fio_period_url(token, d_from, d_to)
    response = requests.get(url, timeout=30)
    if response.status_code != 200:
        print(f"❌ HTTP {response.status_code} from Fio: {url}")
        return None
    try:
        return response.json()
    except json.JSONDecodeError:
        print("❌ JSON decode error")
        return None
def safe_col(t, n):
    """Value of Fio ``columnN``, or None when the column is missing/empty."""
    cell = t.get(f"column{n}")
    if not cell:
        return None
    return cell.get("value")
def clean_date(dt):
    """Strip the timezone from a Fio date ("YYYY-MM-DD+HH:MM" -> "YYYY-MM-DD").

    Coerces the input to ``str`` first, matching the daily importer's
    clean_date(); the original sliced *dt* directly and would raise
    TypeError on a non-string value. Returns None for empty input.
    """
    if not dt:
        return None
    return str(dt)[:10]
def ensure_dir(p: Path):
    """Create directory *p* including any parents; no-op when already present."""
    p.mkdir(parents=True, exist_ok=True)
def save_json(base_dir, account_cfg, data, d_from, d_to):
    """Dump the full-history JSON to ``<base_dir>/<account>/FULL_<from>_to_<to>.json``."""
    target_dir = Path(base_dir) / account_cfg["account_number"].replace("/", "_")
    ensure_dir(target_dir)
    target = target_dir / f"FULL_{d_from:%Y-%m-%d}_to_{d_to:%Y-%m-%d}.json"
    with open(target, "w", encoding="utf-8") as handle:
        json.dump(data, handle, ensure_ascii=False, indent=2)
    return target
# =========================================
# MAIN
# =========================================
def main():
    """Interactively download the FULL history (20 years) of one Fio account
    and upsert it into the MySQL ``transactions`` table.

    Requires the special short-lived Fio token with full-history permission;
    the user picks the account from a numbered menu.
    """
    print("\n=== FULL HISTORY IMPORT (Fio extra-permissions token) ===\n")
    accounts = load_accounts(ACCOUNTS_FILE)
    # show accounts to user
    for i, acc in enumerate(accounts, start=1):
        print(f"{i}. {acc['name']} ({acc['account_number']})")
    print()
    while True:
        try:
            selection = int(input("Select account number (1..N): "))
            if 1 <= selection <= len(accounts):
                break
        except ValueError:
            # FIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit, making Ctrl-C useless here.
            pass
        print("Invalid selection, try again.\n")
    acc = accounts[selection - 1]
    name = acc["name"]
    token = acc["token"]
    acc_num = acc["account_number"]
    print(f"\nSelected: {name} ({acc_num})")
    print("⚠ Make sure you generated the special 10-minute FULL-HISTORY TOKEN.")
    input("Press ENTER to continue...")
    # full 20-year history
    today = date.today()
    try:
        d_from = today.replace(year=today.year - 20)
    except ValueError:
        # FIX: running on 29 Feb with no leap-day counterpart 20 years back
        # raised ValueError; clamp to the 28th in that case.
        d_from = today.replace(year=today.year - 20, day=28)
    d_to = today
    print(f"\nDownloading ALL transactions from {d_from} to {d_to}")
    start_time = time.time()
    data = fetch_fio_json(token, d_from, d_to)
    if data is None:
        print("❌ Download failed")
        return
    # save JSON archive copy
    json_path = save_json(JSON_BASE_DIR, acc, data, d_from, d_to)
    print(f"JSON saved to {json_path}")
    # extract transactions (a single transaction arrives as a dict)
    tlist = data["accountStatement"]["transactionList"].get("transaction", [])
    if isinstance(tlist, dict):
        tlist = [tlist]
    print(f"Transactions loaded: {len(tlist)}")
    if not tlist:
        print("No transactions found, exiting.")
        return
    # MySQL connection (upsert keyed on id_operace via ON DUPLICATE KEY)
    conn = pymysql.connect(**DB)
    cur = conn.cursor()
    sql = """
    INSERT INTO transactions
    (
        datum, objem, mena, cislo_uctu, protiucet, kod_banky,
        ks, vs, ss, zprava_pro_prijemce, poznamka,
        id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
        ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
    )
    VALUES
    (
        %(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
        %(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
        %(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
        %(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
    )
    ON DUPLICATE KEY UPDATE
        datum=VALUES(datum),
        objem=VALUES(objem),
        mena=VALUES(mena),
        protiucet=VALUES(protiucet),
        kod_banky=VALUES(kod_banky),
        ks=VALUES(ks),
        vs=VALUES(vs),
        ss=VALUES(ss),
        zprava_pro_prijemce=VALUES(zprava_pro_prijemce),
        poznamka=VALUES(poznamka),
        ks_1=VALUES(ks_1),
        nazev_banky=VALUES(nazev_banky),
        nazev_protiuctu=VALUES(nazev_protiuctu),
        ss_1=VALUES(ss_1),
        typ=VALUES(typ),
        upresneni_objem=VALUES(upresneni_objem),
        upresneni_mena=VALUES(upresneni_mena),
        vs_1=VALUES(vs_1),
        zadal=VALUES(zadal)
    """
    fio_acc_id = data["accountStatement"]["info"]["accountId"]
    # Build all parameter dicts up-front; column numbers follow the Fio API
    # column mapping used elsewhere in this repo.
    rows = []
    for t in tlist:
        rows.append({
            "datum": clean_date(safe_col(t, 0)),
            "objem": safe_col(t, 1),
            "mena": safe_col(t, 14),
            "cislo_uctu": fio_acc_id,
            "protiucet": safe_col(t, 2),
            "kod_banky": safe_col(t, 3),
            "ks": safe_col(t, 4),
            "vs": safe_col(t, 5),
            "ss": safe_col(t, 6),
            "zprava": safe_col(t, 16),
            "poznamka": safe_col(t, 25),
            "id_operace": safe_col(t, 22),
            "id_pokynu": safe_col(t, 24),
            "ks1": safe_col(t, 18),
            "nazev_banky": safe_col(t, 15),
            "nazev_protiuctu": safe_col(t, 10),
            "ss1": safe_col(t, 19),
            "typ": safe_col(t, 8),
            "upr_objem": safe_col(t, 20),
            "upr_mena": safe_col(t, 21),
            "vs1": safe_col(t, 17),
            "zadal": safe_col(t, 12),
        })
    # batch insert
    inserted = 0
    for i in range(0, len(rows), BATCH_SIZE):
        chunk = rows[i:i + BATCH_SIZE]
        cur.executemany(sql, chunk)
        conn.commit()
        inserted += len(chunk)
    cur.close()
    conn.close()
    print(f"\n✓ Inserted/updated {inserted} transactions.")
    print(f"Total time: {time.time() - start_time:.2f} s")
if __name__ == "__main__":
    main()  # interactive tool — only run when invoked directly

313
30 Report.py Normal file
View File

@@ -0,0 +1,313 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
FIO EXPORT SCRIPT — FULLY COMMENTED VERSION
-------------------------------------------
This script connects to your MySQL "fio" database,
reads all transactions, and exports them into a highly formatted
Excel workbook.
Excel file includes:
• First sheet: "ALL" → contains ALL transactions
• Additional sheets: one for each account from accounts.json
• First 5 CZK sheets appear first in custom order
• All formatting exactly preserved (colors, borders, widths, formulas)
Everything is generated automatically.
"""
import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime
import os
import glob
import json
# ======================================================
# CONFIGURATION
# ======================================================
# MySQL server parameters
# NOTE(review): credentials hard-coded and committed to the repo — consider
# an external env file kept out of version control.
DB_HOST = "192.168.1.76"
DB_PORT = 3307
DB_USER = "root"
DB_PASS = "Vlado9674+"
DB_NAME = "fio"
# Where to save Excel files
OUTPUT_DIR = r"Z:\Dropbox\!!!Days\Downloads Z230"
# JSON file with list of accounts (name + account_number)
ACCOUNTS_JSON = r"C:\Users\vlado\PycharmProjects\FIO\accounts.json"
# Columns that MUST be written as TEXT in Excel using ="value"
# to avoid Excel stripping leading zeros or reformatting them
TEXT_COLUMNS = ["cislo_uctu", "protiucet", "kod_banky", "ks", "vs", "ss"]
# ======================================================
# REMOVE OLD EXPORT FILES
# ======================================================
def cleanup_old_exports():
    """Delete previous FIO transaction exports from OUTPUT_DIR.

    The glob patterns overlap, so matches are de-duplicated first — the
    original could try to delete the same file twice. Files that cannot be
    removed (locked or permission denied) are skipped; only OS-level errors
    are swallowed (the original bare ``except`` hid every exception,
    including programming errors).
    """
    patterns = [
        os.path.join(OUTPUT_DIR, "*FIO*transaction*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO*transactions*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO_transactions*.xlsx"),
    ]
    # De-duplicate across the overlapping patterns before deleting.
    matches = {file for pattern in patterns for file in glob.glob(pattern)}
    for file in sorted(matches):
        try:
            os.remove(file)
            print(f"🗑 Deleted old export: {file}")
        except OSError:
            # Locked or permission-denied file: leave it in place.
            pass
# ======================================================
# CORE EXCEL FORMATTING FUNCTION
# ======================================================
def format_sheet(ws, rows, headers):
    """Write *rows* into worksheet *ws* and apply the standard export styling.

    - Bold yellow header row
    - TEXT_COLUMNS written as ``="value"`` so Excel keeps leading zeros
    - Row background by the sign of "objem" (red = negative, green = else)
    - Fixed column widths, thin borders everywhere, first 10 columns centered
    - Frozen header row with auto-filter enabled
    """
    def _col_letter(index):
        # 1-based index -> Excel column letters (1->A, 26->Z, 27->AA).
        # FIX: the original used chr(64 + i), which produces garbage
        # past column 26.
        letters = ""
        while index > 0:
            index, rem = divmod(index - 1, 26)
            letters = chr(65 + rem) + letters
        return letters

    # -------------------------------
    # 1) Format HEADER row: bold on yellow
    # -------------------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")
    # -------------------------------
    # 2) Write DATA rows; text-sensitive columns become ="value" formulas
    # -------------------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]
            if h in TEXT_COLUMNS and val is not None:
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)
        ws.append(excel_row)
    # -------------------------------
    # 3) Background coloring by "objem" sign
    # -------------------------------
    fill_red = PatternFill(start_color="FFFFDDDD", end_color="FFFFDDDD", fill_type="solid")
    fill_green = PatternFill(start_color="FFEEFFEE", end_color="FFEEFFEE", fill_type="solid")
    objem_col_index = headers.index("objem") + 1
    for row_idx in range(2, len(rows) + 2):  # row 1 is the header
        cell_objem = ws.cell(row=row_idx, column=objem_col_index)
        try:
            value = float(cell_objem.value)
        except (TypeError, ValueError):
            # FIX: was a bare ``except``; None / non-numeric cells count as 0.
            value = 0
        fill = fill_red if value < 0 else fill_green
        for col_idx in range(1, len(headers) + 1):
            ws.cell(row=row_idx, column=col_idx).fill = fill
    # -------------------------------
    # 4) Fixed column widths (hand-tuned for the transactions schema)
    # -------------------------------
    fixed_widths = [
        6, 11, 11, 5, 14, 14, 8, 6, 13, 13,
        50, 53, 12, 12, 5, 49, 29, 5, 29, 16,
        15, 12, 49, 20
    ]
    for i, width in enumerate(fixed_widths, start=1):
        ws.column_dimensions[_col_letter(i)].width = width
    # -------------------------------
    # 5) Borders + center alignment for the first 10 columns
    # -------------------------------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")
    total_rows = len(rows) + 1
    total_cols = len(headers)
    for row_idx in range(1, total_rows + 1):
        for col_idx in range(1, total_cols + 1):
            cell = ws.cell(row=row_idx, column=col_idx)
            cell.border = border
            if col_idx <= 10:
                cell.alignment = align_center
    # Freeze header row so it stays visible while scrolling
    ws.freeze_panes = "A2"
    # Enable auto filter on the header row
    ws.auto_filter.ref = ws.dimensions
# ======================================================
# MAIN EXPORT PROCESS
# ======================================================
def export_fio():
    """Export the MySQL ``transactions`` table to a formatted Excel workbook.

    First sheet "ALL" holds every transaction; one additional sheet per
    account from accounts.json, with five preferred CZK sheets ordered first.
    Old exports are deleted and the new file is timestamped.
    """
    print("Connecting to MySQL...")
    try:
        conn = mysql.connector.connect(
            host=DB_HOST,
            port=DB_PORT,
            user=DB_USER,
            password=DB_PASS,
            database=DB_NAME
        )
    except Error as e:
        print("❌ Failed to connect:", e)
        return
    cur = conn.cursor(dictionary=True)
    # -------------------------------
    # Load accounts.json
    # -------------------------------
    with open(ACCOUNTS_JSON, "r", encoding="utf-8") as f:
        accounts = json.load(f)
    # -------------------------------
    # Priority sheets come first, remaining accounts keep file order
    # -------------------------------
    preferred_order = [
        "CZK rodina",
        "CZK ordinace",
        "CZK na jídlo",
        "CZK TrialHelp",
        "CZK maminka svojě věci"
    ]
    accounts_sorted = []
    for pref in preferred_order:
        for acc in accounts:
            if acc["name"] == pref:
                accounts_sorted.append(acc)
    for acc in accounts:
        if acc not in accounts_sorted:
            accounts_sorted.append(acc)
    # -------------------------------
    # New workbook (drop the default empty sheet)
    # NOTE(review): if the DB is completely empty no sheet is ever created
    # and wb.save() will fail — acceptable for this tool, but worth knowing.
    # -------------------------------
    wb = Workbook()
    wb.remove(wb.active)
    # -------------------------------
    # FIRST SHEET: ALL TRANSACTIONS
    # -------------------------------
    cur.execute("SELECT * FROM transactions ORDER BY datum DESC")
    all_rows = cur.fetchall()
    if all_rows:
        headers = list(all_rows[0].keys())
        ws_all = wb.create_sheet(title="ALL")
        ws_all.append(headers)
        format_sheet(ws_all, all_rows, headers)
    # -------------------------------
    # INDIVIDUAL SHEETS PER ACCOUNT
    # -------------------------------
    for acc in accounts_sorted:
        acc_num = acc["account_number"]
        sheet_name = acc["name"][:31]  # Excel sheet name limit
        print(f"➡ Creating sheet: {sheet_name}")
        # FIX: parameterized query instead of f-string interpolation —
        # the original was vulnerable to SQL injection / quote breakage
        # if an account number ever contained a quote.
        cur.execute(
            "SELECT * FROM transactions WHERE cislo_uctu = %s ORDER BY datum DESC",
            (acc_num,),
        )
        rows = cur.fetchall()
        if not rows:
            print(f"⚠ No data for {sheet_name}")
            continue
        headers = list(rows[0].keys())
        ws = wb.create_sheet(title=sheet_name)
        ws.append(headers)
        format_sheet(ws, rows, headers)
    cur.close()  # FIX: the cursor was left open; close it before the connection
    conn.close()
    # -------------------------------
    # Save Excel file with a timestamped name
    # -------------------------------
    cleanup_old_exports()
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    filename = f"{timestamp} FIO transactions.xlsx"
    output_file = os.path.join(OUTPUT_DIR, filename)
    wb.save(output_file)
    print(f"✅ Export complete:\n{output_file}")
# ======================================================
# MAIN ENTRY POINT
# ======================================================
if __name__ == "__main__":
    export_fio()  # run the export only when invoked directly

7
31 Python.py Normal file
View File

@@ -0,0 +1,7 @@
import os
# Resolve the user's Dropbox folder and derive shared PyCharm paths from it.
DROPBOX = os.path.join(os.path.expanduser("~"), "Dropbox")
SHARED_PATH = os.path.join(DROPBOX, "PycharmShared")
GLOBAL_ENV = os.path.join(SHARED_PATH, ".env")  # NOTE(review): defined but unused here
print(DROPBOX)

165
QRPlatbaApp.py Normal file
View File

@@ -0,0 +1,165 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import urllib.parse
import qrcode
from pathlib import Path
from datetime import datetime
from PIL import Image, ImageTk
import customtkinter as ctk
from tkinter import messagebox
# ================================
# ⚙️ Default Configuration
# ================================
IBAN = "CZ7520100000002800046620"  # practice account the QR payments target
CURRENCY = "CZK"
OUTPUT_DIR = Path("QRPlatby")  # generated QR PNGs land here (relative to CWD)
OUTPUT_DIR.mkdir(exist_ok=True)
# Default patient identity (can be overridden by CLI arguments below)
PRIJMENI = "Buzalka"
JMENO = "Vladimír"
RODCIS = "730928104"  # birth number; doubles as the payment's variable symbol
# ================================
# 💬 Argument Handling
# ================================
# Usage: QRPlatbaApp.py JMENO PRIJMENI RODCIS — note the CLI order is
# first-name, surname, birth number (reversed w.r.t. the defaults above).
if len(sys.argv) >= 4:
    JMENO = sys.argv[1]
    PRIJMENI = sys.argv[2]
    RODCIS = sys.argv[3]
elif len(sys.argv) == 2 and sys.argv[1] in ("-h", "--help"):
    print("Usage: QRPlatbaApp.py JMENO PRIJMENI RODCIS")
    sys.exit(0)
# ================================
# 💉 Items to Pay
# ================================
# Menu of payable items: label -> price in CZK.
ITEMS = {
    "Očkování chřipka Vaxigrip": 600.00,
    "Očkování chřipka Efluelda": 1300.00,
}
# ================================
# 🧩 Helper
# ================================
def create_spayd(iban, amount, vs, msg, currency="CZK"):
    """Compose a ČBA Short Payment Descriptor (SPAYD 1.0) string.

    The message is percent-encoded except for characters SPAYD allows
    verbatim; the amount is always rendered with two decimals.
    """
    encoded_msg = urllib.parse.quote(msg, safe="$%*+-.:/")
    fields = f"ACC:{iban}*AM:{amount:.2f}*CC:{currency}*X-VS:{vs}*MSG:{encoded_msg}"
    return "SPD*1.0*" + fields
# ================================
# 🪟 GUI Class
# ================================
class QRPlatbaApp(ctk.CTk):
    """CustomTkinter window that generates and previews SPAYD QR payments
    for the items in ITEMS, pre-filled with the patient identity from the
    module-level PRIJMENI/JMENO/RODCIS values."""

    def __init__(self):
        """Build the whole UI: patient info panel, item selector, generate
        button and the QR preview label; then center the window."""
        super().__init__()
        self.title("QR Platba Ordinace MUDr. Buzalková")
        self.geometry("520x520")
        self.minsize(480, 480)
        self.resizable(True, True)
        ctk.set_appearance_mode("light")
        ctk.set_default_color_theme("blue")
        frame = ctk.CTkFrame(self, corner_radius=10)
        frame.pack(expand=True, fill="both", padx=20, pady=20)
        ctk.CTkLabel(frame, text="Generátor QR Platby",
                     font=("Arial", 20, "bold")).pack(pady=(10, 20))
        # 👤 Patient Info (read-only labels from module-level defaults/argv)
        patient = ctk.CTkFrame(frame, corner_radius=8)
        patient.pack(fill="x", pady=(0, 20), padx=10)
        for text in [f"Příjmení: {PRIJMENI}",
                     f"Jméno: {JMENO}",
                     f"Rodné číslo: {RODCIS}"]:
            ctk.CTkLabel(patient, text=text, font=("Arial", 12)).pack(anchor="w", padx=10, pady=2)
        # 💰 Payment Section — dropdown shows "name (price Kč)"; item_map
        # translates the display string back to the ITEMS key
        pay = ctk.CTkFrame(frame, corner_radius=8)
        pay.pack(fill="x", pady=(0, 20), padx=10)
        ctk.CTkLabel(pay, text="Vyberte položku k úhradě:",
                     font=("Arial", 12, "bold")).pack(anchor="w", padx=10, pady=(10, 5))
        self.display_items = [f"{name} ({price:.0f} Kč)" for name, price in ITEMS.items()]
        self.item_map = {f"{name} ({price:.0f} Kč)": name for name, price in ITEMS.items()}
        self.selected_item = ctk.StringVar(value=self.display_items[0])
        self.combo = ctk.CTkOptionMenu(
            pay,
            variable=self.selected_item,
            values=self.display_items,
            font=("Arial", 12),
            command=self.update_amount
        )
        self.combo.pack(fill="x", padx=10)
        self.amount_label = ctk.CTkLabel(pay, text="", font=("Arial", 12, "italic"))
        self.amount_label.pack(anchor="e", padx=10, pady=(5, 10))
        self.update_amount()
        ctk.CTkButton(frame, text="Vytvořit QR Platbu",
                      font=("Arial", 13, "bold"),
                      height=40,
                      command=self.generate_qr).pack(pady=10)
        self.qr_label = ctk.CTkLabel(frame, text="")
        self.qr_label.pack(pady=15)
        ctk.CTkLabel(frame,
                     text="© Ordinace MUDr. Buzalková | QR Platba dle ČBA v1.2",
                     font=("Arial", 10),
                     text_color="#666").pack(side="bottom", pady=(10, 0))
        self.center_window()

    # ================================
    # 🪟 Center Window
    # ================================
    def center_window(self):
        """Move the window to the center of the screen at its current size."""
        self.update_idletasks()
        width = self.winfo_width()
        height = self.winfo_height()
        screen_width = self.winfo_screenwidth()
        screen_height = self.winfo_screenheight()
        x = int((screen_width / 2) - (width / 2))
        y = int((screen_height / 2) - (height / 2))
        self.geometry(f"{width}x{height}+{x}+{y}")

    # ================================
    # 💸 Update and Generate
    # ================================
    def update_amount(self, _=None):
        """Refresh the price label for the currently selected item.

        Also used as the OptionMenu callback, which passes the selected
        value as an (ignored) positional argument.
        """
        display_item = self.selected_item.get()
        item = self.item_map[display_item]
        self.amount_label.configure(text=f"Částka: {ITEMS[item]:.2f}")

    def generate_qr(self):
        """Create the SPAYD QR for the selected item, save it as a PNG in
        OUTPUT_DIR and show a 300x300 preview in the window."""
        display_item = self.selected_item.get()
        item = self.item_map[display_item]
        spayd = create_spayd(IBAN, ITEMS[item], RODCIS, f"{PRIJMENI} {JMENO} {item}", CURRENCY)
        img = qrcode.make(spayd)
        filename = f"{PRIJMENI}_{JMENO}_{datetime.now():%Y%m%d_%H%M%S}.png"
        out_path = OUTPUT_DIR / filename
        img.save(out_path)
        img_resized = img.resize((300, 300))
        qr_tk = ImageTk.PhotoImage(img_resized)
        self.qr_label.configure(image=qr_tk)
        # Keep a reference so Tk's garbage collection doesn't drop the image
        self.qr_label.image = qr_tk
        self.update_idletasks()
        self.geometry(f"{self.winfo_reqwidth()}x{self.winfo_reqheight()}")
        self.center_window()
# ================================
# 🚀 Main
# ================================
if __name__ == "__main__":
    app = QRPlatbaApp()  # build the GUI
    app.mainloop()       # block until the window is closed

38
QRPlatbaApp.spec Normal file
View File

@@ -0,0 +1,38 @@
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller build specification for the QRPlatbaApp GUI.
# Analysis collects the script, its imports and data files.
a = Analysis(
['QRPlatbaApp.py'],  # entry-point script to bundle
pathex=[],  # extra module search paths (none needed)
binaries=[],  # extra binary files to include
datas=[],  # extra data files to include
hiddenimports=[],  # modules PyInstaller cannot detect statically
hookspath=[],  # custom hook directories
hooksconfig={},  # per-hook configuration
runtime_hooks=[],  # scripts run before the app starts
excludes=[],  # modules to leave out of the bundle
noarchive=False,  # keep pure-Python modules inside the PYZ archive
optimize=0,  # bytecode optimization level (0 = none)
)
# Bundle the collected pure-Python modules into a PYZ archive.
pyz = PYZ(a.pure)
# Build a single-file windowed executable (console=False hides the terminal).
exe = EXE(
pyz,
a.scripts,
a.binaries,
a.datas,
[],
name='QRPlatbaApp',  # output executable name
debug=False,
bootloader_ignore_signals=False,
strip=False,  # do not strip symbols from binaries
upx=True,  # compress with UPX if available
upx_exclude=[],
runtime_tmpdir=None,  # onefile mode: extract to a temp dir at run time
console=False,  # GUI app: no console window
disable_windowed_traceback=False,
argv_emulation=False,  # macOS-only argv handling; irrelevant on Windows
target_arch=None,
codesign_identity=None,  # macOS code-signing; not used here
entitlements_file=None,
)

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

7466
SupportFiles/pohyby.json Normal file

File diff suppressed because it is too large Load Diff

1211786
SupportFiles/pohyby.jsonold Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,53 @@
/*
Navicat Premium Dump SQL
Source Server : MySQL Tower
Source Server Type : MySQL
Source Server Version : 90500 (9.5.0)
Source Host : 192.168.1.76:3307
Source Schema : fio
Target Server Type : MySQL
Target Server Version : 90500 (9.5.0)
File Encoding : 65001
Date: 25/11/2025 15:23:42
*/
-- Ensure the session uses full 4-byte UTF-8 for the DDL below.
SET NAMES utf8mb4;
-- Disable FK checks so the table can be dropped/recreated in any order.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for transactions
-- ----------------------------
-- Holds bank-account movements imported from Fio; one row per transaction.
DROP TABLE IF EXISTS `transactions`;
CREATE TABLE `transactions` (
`id` int NOT NULL AUTO_INCREMENT,
`datum` date NULL DEFAULT NULL,
`objem` decimal(14, 2) NULL DEFAULT NULL,
`mena` char(3) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`cislo_uctu` varchar(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`protiucet` varchar(40) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`kod_banky` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`ks` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`vs` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`ss` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`zprava_pro_prijemce` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`poznamka` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`id_operace` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`id_pokynu` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`ks_1` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`nazev_banky` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`nazev_protiuctu` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`ss_1` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`typ` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`upresneni_objem` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`upresneni_mena` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`vs_1` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`zadal` varchar(200) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
`imported_at` datetime NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
-- Dedupe guard for the importer: one row per (account, operation, order).
-- NOTE(review): all three columns are nullable — in MySQL, NULLs do not
-- collide in a UNIQUE index, so rows with NULL id_operace/id_pokynu can
-- still duplicate. Confirm the importer always fills these (e.g. the
-- generated id_operace fallback mentioned in the script header).
UNIQUE INDEX `uniq_tx`(`cislo_uctu` ASC, `id_operace` ASC, `id_pokynu` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 9825 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- Re-enable FK checks after the restore completes.
SET FOREIGN_KEY_CHECKS = 1;

53
accounts.json Normal file
View File

@@ -0,0 +1,53 @@
[
{
"name": "EUR tatínek 1",
"account_number": "2100074583",
"token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
},
{
"name": "CZK rodina",
"account_number": "2100046291",
"token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
},
{
"name": "EUR TrialHelp",
"account_number": "2200787265",
"token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
},
{
"name": "CZK tatínek",
"account_number": "2400046293",
"token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
},
{
"name": "CHF tatínek",
"account_number": "2402161017",
"token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
},
{
"name": "EUR tatínek 2",
"account_number": "2500074582",
"token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
},
{
"name": "CZK TrialHelp",
"account_number": "2900046548",
"token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
},
{
"name": "CZK maminka svoje věci",
"account_number": "2003310572",
"token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
},
{
"name": "CZK na jídlo",
"account_number": "2403310563",
"token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
},
{
"name": "CZK ordinace",
"account_number": "2800046620",
"token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
}
]

File diff suppressed because it is too large Load Diff

2820
build/QRPlatbaApp/EXE-00.toc Normal file

File diff suppressed because it is too large Load Diff

2798
build/QRPlatbaApp/PKG-00.toc Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

1393
build/QRPlatbaApp/PYZ-00.toc Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,59 @@
This file lists modules PyInstaller was not able to find. This does not
necessarily mean this module is required for running your program. Python and
Python 3rd-party packages include a lot of conditional or optional modules. For
example the module 'ntpath' only exists on Windows, whereas the module
'posixpath' only exists on Posix systems.
Types of import:
* top-level: imported at the top-level - look at these first
* conditional: imported within an if-statement
* delayed: imported within a function
* optional: imported within a try-except-statement
IMPORTANT: Do NOT post this list to the issue-tracker. Use it as a basis for
tracking down the missing module yourself. Thanks!
missing module named pwd - imported by posixpath (delayed, conditional, optional), shutil (delayed, optional), tarfile (optional), pathlib (delayed, optional), subprocess (delayed, conditional, optional), setuptools._distutils.util (delayed, conditional, optional), netrc (delayed, conditional), getpass (delayed), setuptools._vendor.backports.tarfile (optional), setuptools._distutils.archive_util (optional), http.server (delayed, optional)
missing module named grp - imported by shutil (delayed, optional), tarfile (optional), pathlib (delayed, optional), subprocess (delayed, conditional, optional), setuptools._vendor.backports.tarfile (optional), setuptools._distutils.archive_util (optional)
missing module named _posixsubprocess - imported by subprocess (conditional), multiprocessing.util (delayed)
missing module named fcntl - imported by subprocess (optional)
missing module named _frozen_importlib_external - imported by importlib._bootstrap (delayed), importlib (optional), importlib.abc (optional), zipimport (top-level)
excluded module named _frozen_importlib - imported by importlib (optional), importlib.abc (optional), zipimport (top-level)
missing module named posix - imported by os (conditional, optional), shutil (conditional), importlib._bootstrap_external (conditional), posixpath (optional)
missing module named resource - imported by posix (top-level)
missing module named _posixshmem - imported by multiprocessing.resource_tracker (conditional), multiprocessing.shared_memory (conditional)
missing module named _scproxy - imported by urllib.request (conditional)
missing module named termios - imported by getpass (optional), tty (top-level)
missing module named multiprocessing.BufferTooShort - imported by multiprocessing (top-level), multiprocessing.connection (top-level)
missing module named multiprocessing.AuthenticationError - imported by multiprocessing (top-level), multiprocessing.connection (top-level)
missing module named multiprocessing.get_context - imported by multiprocessing (top-level), multiprocessing.pool (top-level), multiprocessing.managers (top-level), multiprocessing.sharedctypes (top-level)
missing module named multiprocessing.TimeoutError - imported by multiprocessing (top-level), multiprocessing.pool (top-level)
missing module named multiprocessing.set_start_method - imported by multiprocessing (top-level), multiprocessing.spawn (top-level)
missing module named multiprocessing.get_start_method - imported by multiprocessing (top-level), multiprocessing.spawn (top-level)
missing module named pyimod02_importers - imported by U:\PycharmProjects\FIO\.venv\Lib\site-packages\PyInstaller\hooks\rthooks\pyi_rth_pkgutil.py (delayed)
missing module named typing_extensions.Buffer - imported by setuptools._vendor.typing_extensions (top-level), setuptools._vendor.wheel.wheelfile (conditional)
missing module named typing_extensions.Literal - imported by setuptools._vendor.typing_extensions (top-level), setuptools.config._validate_pyproject.formats (conditional)
missing module named typing_extensions.Self - imported by setuptools._vendor.typing_extensions (top-level), setuptools.config.expand (conditional), setuptools.config.pyprojecttoml (conditional), setuptools.config._validate_pyproject.error_reporting (conditional)
missing module named typing_extensions.deprecated - imported by setuptools._vendor.typing_extensions (top-level), setuptools._distutils.sysconfig (conditional), setuptools._distutils.command.bdist (conditional)
missing module named typing_extensions.TypeAlias - imported by setuptools._vendor.typing_extensions (top-level), setuptools._distutils.compilers.C.base (conditional), setuptools._reqs (conditional), setuptools.warnings (conditional), setuptools._path (conditional), setuptools._distutils.dist (conditional), setuptools.config.setupcfg (conditional), setuptools.config._apply_pyprojecttoml (conditional), setuptools.dist (conditional), setuptools.command.bdist_egg (conditional), setuptools.compat.py311 (conditional)
missing module named typing_extensions.Unpack - imported by setuptools._vendor.typing_extensions (top-level), setuptools._distutils.util (conditional), setuptools._distutils.compilers.C.base (conditional), setuptools._distutils.cmd (conditional)
missing module named typing_extensions.TypeVarTuple - imported by setuptools._vendor.typing_extensions (top-level), setuptools._distutils.util (conditional), setuptools._distutils.compilers.C.base (conditional), setuptools._distutils.cmd (conditional)
missing module named asyncio.DefaultEventLoopPolicy - imported by asyncio (delayed, conditional), asyncio.events (delayed, conditional)
missing module named vms_lib - imported by platform (delayed, optional)
missing module named 'java.lang' - imported by platform (delayed, optional)
missing module named java - imported by platform (delayed)
missing module named _winreg - imported by platform (delayed, optional)
missing module named usercustomize - imported by site (delayed, optional)
missing module named sitecustomize - imported by site (delayed, optional)
missing module named readline - imported by site (delayed, optional), rlcompleter (optional)
missing module named _typeshed - imported by setuptools._distutils.dist (conditional), setuptools.glob (conditional), setuptools.compat.py311 (conditional)
missing module named _manylinux - imported by packaging._manylinux (delayed, optional), setuptools._vendor.packaging._manylinux (delayed, optional), setuptools._vendor.wheel.vendored.packaging._manylinux (delayed, optional)
missing module named importlib_resources - imported by setuptools._vendor.jaraco.text (optional)
missing module named trove_classifiers - imported by setuptools.config._validate_pyproject.formats (optional)
missing module named PyObjCTools - imported by darkdetect._mac_detect (optional)
missing module named Foundation - imported by darkdetect._mac_detect (optional)
missing module named numpy - imported by PIL._typing (conditional, optional)
missing module named olefile - imported by PIL.FpxImagePlugin (top-level), PIL.MicImagePlugin (top-level)
missing module named defusedxml - imported by PIL.Image (optional)
missing module named png - imported by qrcode.compat.png (optional)

File diff suppressed because it is too large Load Diff

BIN
dist/QRPlatbaApp.exe vendored Normal file

Binary file not shown.

BIN
fio_payment.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 898 B

BIN
fio_qr_correct.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

BIN
fio_qr_valid_v12.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

15
orchestrator.log Normal file
View File

@@ -0,0 +1,15 @@
[2025-12-06 09:12:10] === ORCHESTRATOR START ===
[2025-12-06 09:12:10] ▶ STARTING: C:\Users\vlado\PycharmProjects\FIO\10 Read.py
[2025-12-06 09:12:10] --- ERROR ---
c:\Reporting\Python\python.exe: can't open file 'C:\\Users\\vlado\\PycharmProjects\\FIO\\10 Read.py': [Errno 2] No such file or directory
[2025-12-06 09:12:10] ❌ FINISHED WITH ERRORS (2): C:\Users\vlado\PycharmProjects\FIO\10 Read.py
[2025-12-06 09:12:10] ▶ STARTING: C:\Users\vlado\PycharmProjects\FIO\30 Report ordinace.py
[2025-12-06 09:12:10] --- ERROR ---
c:\Reporting\Python\python.exe: can't open file 'C:\\Users\\vlado\\PycharmProjects\\FIO\\30 Report ordinace.py': [Errno 2] No such file or directory
[2025-12-06 09:12:10] ❌ FINISHED WITH ERRORS (2): C:\Users\vlado\PycharmProjects\FIO\30 Report ordinace.py
[2025-12-06 09:12:10] ▶ STARTING: C:\Users\vlado\PycharmProjects\FIO\31 Report ordinace expenses.py
[2025-12-06 09:12:10] --- ERROR ---
c:\Reporting\Python\python.exe: can't open file 'C:\\Users\\vlado\\PycharmProjects\\FIO\\31 Report ordinace expenses.py': [Errno 2] No such file or directory
[2025-12-06 09:12:10] ❌ FINISHED WITH ERRORS (2): C:\Users\vlado\PycharmProjects\FIO\31 Report ordinace expenses.py
[2025-12-06 09:12:10] === ORCHESTRATOR END ===

BIN
qrplatba_ok.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

BIN
test_text.xlsx Normal file

Binary file not shown.