reporter
This commit is contained in:

2   .idea/FIO.iml   generated
@@ -4,7 +4,7 @@
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/.venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Python 3.12 (FIO)" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="Python 3.13 (FIO)" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
1   .idea/misc.xml   generated

@@ -3,4 +3,5 @@
   <component name="Black">
     <option name="sdkName" value="Python 3.12 (FIO)" />
   </component>
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.13 (FIO)" project-jdk-type="Python SDK" />
 </project>
5   2025-11-30 final reporter/.env   Normal file

@@ -0,0 +1,5 @@
WHATSAPP_TOKEN=EAAhOTShYLw4BQEh6HTO8fHmLHtbEXhZBEB03wgEXx1lVrcJkNHQlqPXZAlysqXIqse15bfL5V0kjNTnJ91kcK0DGkgpNtlzLaHbSgOdXaYUu9DOmSZACGAtqAj8nkSJz0ZA32Qz2BYJggyTxfkjwlT7rzMtGtbA9HwOA9AjEKG6JiiozBJmZCZA0nGBSx9JlHZCVQZDZD
PHONE_NUMBER_ID=420775735276
WHATSAPP_PHONE_NUMBER_ID=930187756843231
WHATSAPP_RECIPIENT_NUMBER=420775735276
WHATSAPP_TEST_NUMBER=15551451876
353   2025-11-30 final reporter/21ReadJSONmultipleaccounts.py   Normal file

@@ -0,0 +1,353 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
import io

# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')


"""
FIO MULTI–ACCOUNT IMPORTER — FULLY COMMENTED VERSION
====================================================

This script downloads transactions for **multiple Fio bank accounts**
(using their API tokens) and imports them into a MySQL database
(`fio.transactions` table).

It also saves the raw JSON responses into a folder structure
for backup / debugging / later use.

Main features:
    • Reads all accounts from accounts.json
    • Downloads last N days (default 90)
    • Saves JSON files to disk
    • Extracts all transactions with safe parsing
    • Inserts into MySQL with ON DUPLICATE KEY UPDATE
    • Efficient batch insertion (executemany)
"""

import os
import json
import time
from datetime import date, timedelta
from pathlib import Path

import requests  # used to call Fio REST API
import pymysql   # MySQL driver


# =========================================
# CONFIGURATION
# =========================================

# JSON file containing multiple account configs:
# [
#   { "name": "CZK rodina", "account_number": "2100046291", "token": "xxx" },
#   ...
# ]
ACCOUNTS_FILE = r"c:\users\vlado\PycharmProjects\FIO\accounts.json"

# Directory where raw JSON files from Fio API will be stored.
JSON_BASE_DIR = r"z:\Dropbox\!!!Days\Downloads Z230\Fio"

# MySQL connection parameters
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}

# How many transactions insert per batch (performance tuning)
BATCH_SIZE = 500

# How many days back we load from Fio (default = last 90 days)
DAYS_BACK = 90


# =========================================
# HELPERS
# =========================================

def load_accounts(path: str):
    """
    Reads accounts.json and does simple validation to ensure
    each entry contains: name, account_number, token.
    """
    with open(path, "r", encoding="utf-8") as f:
        accounts = json.load(f)

    for acc in accounts:
        for key in ("name", "account_number", "token"):
            if key not in acc:
                raise ValueError(f"Missing '{key}' in account config: {acc}")

    return accounts


def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
    """
    Constructs the exact URL for Fio REST API "periods" endpoint.
    Example:
        https://fioapi.fio.cz/v1/rest/periods/<token>/2025-01-01/2025-01-31/transactions.json
    """
    from_str = d_from.strftime("%Y-%m-%d")
    to_str = d_to.strftime("%Y-%m-%d")
    return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{from_str}/{to_str}/transactions.json"


def fetch_fio_json(token: str, d_from: date, d_to: date):
    """
    Calls Fio API and fetches JSON.
    Handles HTTP errors and JSON decoding errors.
    """
    url = fio_url_for_period(token, d_from, d_to)
    resp = requests.get(url, timeout=30)

    if resp.status_code != 200:
        print(f" ❌ HTTP {resp.status_code} from Fio: {url}")
        return None

    try:
        return resp.json()
    except json.JSONDecodeError:
        print(" ❌ Cannot decode JSON from Fio response")
        return None


def safe_col(t: dict, n: int):
    """
    SAFE ACCESSOR for Fio transaction column numbers.

    Fio JSON schema example:
        "column5": { "name": "VS", "value": "123456" }

    But the structure is NOT guaranteed to exist.
    So this function prevents KeyError or NoneType errors.

    Returns:
        t["columnN"]["value"] or None
    """
    key = f"column{n}"
    val = t.get(key)
    if not val:
        return None
    return val.get("value")


def clean_date(dt_str: str):
    """
    Fio returns dates like: "2025-02-14+0100"
    We strip timezone → "2025-02-14"
    """
    if not dt_str:
        return None
    return dt_str[:10]


def ensure_dir(path: Path):
    """Creates directory if it doesn’t exist."""
    path.mkdir(parents=True, exist_ok=True)


def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date):
    """
    Saves raw JSON to:
        <base_dir>/<account_number>/YYYY-MM-DD_to_YYYY-MM-DD.json

    Useful for debugging, backups, or re-imports.
    """
    acc_num_raw = account_cfg["account_number"]
    acc_folder_name = acc_num_raw.replace("/", "_")  # sanitize dir name for filesystem

    out_dir = Path(base_dir) / acc_folder_name
    ensure_dir(out_dir)

    filename = f"{d_from.strftime('%Y-%m-%d')}_to_{d_to.strftime('%Y-%m-%d')}.json"
    out_path = out_dir / filename

    with open(out_path, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    return out_path


# =========================================
# MAIN IMPORT LOGIC
# =========================================

def main():
    start_all = time.time()

    # Calculate time range (last N days)
    today = date.today()
    d_from = today - timedelta(days=DAYS_BACK)
    d_to = today

    print("=== Fio multi-account import ===")
    print(f"Období: {d_from} až {d_to}")
    print("Načítám účty z JSON konfigurace...")

    # Load all accounts from accounts.json
    accounts = load_accounts(ACCOUNTS_FILE)
    print(f" Účtů v konfiguraci: {len(accounts)}\n")

    # Connect to database
    conn = pymysql.connect(**DB)
    cur = conn.cursor()

    # SQL INSERT with ON DUPLICATE KEY UPDATE
    # This means: if transaction already exists (same unique key), update it.
    sql = """
        INSERT INTO transactions
        (
            datum, objem, mena, cislo_uctu, protiucet, kod_banky,
            ks, vs, ss, zprava_pro_prijemce, poznamka,
            id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
            ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
        )
        VALUES
        (
            %(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
            %(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
            %(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
            %(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
        )
        ON DUPLICATE KEY UPDATE
            datum = VALUES(datum),
            objem = VALUES(objem),
            mena = VALUES(mena),
            protiucet = VALUES(protiucet),
            kod_banky = VALUES(kod_banky),
            ks = VALUES(ks),
            vs = VALUES(vs),
            ss = VALUES(ss),
            zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
            poznamka = VALUES(poznamka),
            ks_1 = VALUES(ks_1),
            nazev_banky = VALUES(nazev_banky),
            nazev_protiuctu = VALUES(nazev_protiuctu),
            ss_1 = VALUES(ss_1),
            typ = VALUES(typ),
            upresneni_objem = VALUES(upresneni_objem),
            upresneni_mena = VALUES(upresneni_mena),
            vs_1 = VALUES(vs_1),
            zadal = VALUES(zadal)
    """

    total_inserted = 0

    # ======================================================
    # PROCESS EACH ACCOUNT IN accounts.json
    # ======================================================
    for acc in accounts:
        name = acc["name"]
        cfg_acc_num = acc["account_number"]
        token = acc["token"]

        print(f"--- Účet: {name} ({cfg_acc_num}) ---")
        t0 = time.time()

        # --- 1) Download JSON from Fio API
        data = fetch_fio_json(token, d_from, d_to)
        if data is None:
            print(" Přeskakuji, žádná data / chyba API.\n")
            continue

        # --- 2) Save raw JSON file to disk
        json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
        print(f" JSON uložen do: {json_path}")

        # --- 3) Extract transactions from JSON tree
        tlist = data["accountStatement"]["transactionList"].get("transaction", [])

        # FIO can return single transaction as an object (not list)
        if isinstance(tlist, dict):
            tlist = [tlist]

        print(f" Počet transakcí v období: {len(tlist)}")

        if not tlist:
            print(" Žádné transakce, jdu dál.\n")
            continue

        # FIO returns account ID under accountStatement.info.accountId
        fio_acc_id = data["accountStatement"]["info"]["accountId"]

        # Warn if account ID in JSON doesn't match config (informational only)
        if cfg_acc_num and cfg_acc_num.split("/")[0] not in fio_acc_id:
            print(f" ⚠ Upozornění: accountId z Fio ({fio_acc_id}) "
                  f"se neshoduje s account_number v konfiguraci ({cfg_acc_num})")

        # --- 4) Build list of MySQL rows
        rows = []
        for t in tlist:
            row = {
                "datum": clean_date(safe_col(t, 0)),
                "objem": safe_col(t, 1),
                "mena": safe_col(t, 14),

                "cislo_uctu": fio_acc_id,
                "protiucet": safe_col(t, 2),
                "kod_banky": safe_col(t, 3),

                "ks": safe_col(t, 4),
                "vs": safe_col(t, 5),
                "ss": safe_col(t, 6),

                "zprava": safe_col(t, 16),
                "poznamka": safe_col(t, 25),

                "id_operace": safe_col(t, 22),
                "id_pokynu": safe_col(t, 24),

                "ks1": safe_col(t, 18),
                "nazev_banky": safe_col(t, 15),
                "nazev_protiuctu": safe_col(t, 10),

                "ss1": safe_col(t, 19),
                "typ": safe_col(t, 8),

                "upr_objem": safe_col(t, 20),
                "upr_mena": safe_col(t, 21),
                "vs1": safe_col(t, 17),

                "zadal": safe_col(t, 12),
            }
            rows.append(row)

        # --- 5) INSERT rows into MySQL in batches
        inserted = 0

        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i : i + BATCH_SIZE]
            cur.executemany(sql, chunk)  # fast multi-row insert/update
            conn.commit()
            inserted += len(chunk)

        elapsed = time.time() - t0
        total_inserted += inserted

        print(f" ✓ Zapsáno (insert/update): {inserted} řádků do DB za {elapsed:.2f} s\n")

    # Close DB
    cur.close()
    conn.close()

    total_elapsed = time.time() - start_all

    print(f"=== Hotovo. Celkem zapsáno {total_inserted} transakcí. "
          f"Celkový čas: {total_elapsed:.2f} s ===")


# ======================================================
# ENTRY POINT
# ======================================================

if __name__ == "__main__":
    main()
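The column mapping above leans on safe_col and clean_date tolerating missing keys. A minimal, self-contained sketch of that behaviour on a hand-written transaction fragment (the sample values are invented; the column numbers follow the mapping used in the script):

# Hypothetical Fio transaction fragment shaped like the importer expects.
sample_t = {
    "column0": {"name": "Datum", "value": "2025-02-14+0100"},
    "column1": {"name": "Objem", "value": -250.0},
    "column5": {"name": "VS", "value": "123456"},
    # column14 (currency) deliberately missing
}

def safe_col(t, n):
    val = t.get(f"column{n}")
    return val.get("value") if val else None

def clean_date(s):
    return s[:10] if s else None

print(clean_date(safe_col(sample_t, 0)))  # 2025-02-14
print(safe_col(sample_t, 1))              # -250.0
print(safe_col(sample_t, 14))             # None (a missing column is tolerated)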
320   2025-11-30 final reporter/30Report.py   Normal file

@@ -0,0 +1,320 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
import io

# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')

"""
FIO EXPORT SCRIPT — FULLY COMMENTED VERSION
-------------------------------------------

This script connects to your MySQL "fio" database,
reads all transactions, and exports them into a highly formatted
Excel workbook.

Excel file includes:

    • First sheet: "ALL" → contains ALL transactions
    • Additional sheets: one for each account from accounts.json
    • First 5 CZK sheets appear first in custom order
    • All formatting exactly preserved (colors, borders, widths, formulas)

Everything is generated automatically.
"""

import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime
import os
import glob
import json

# ======================================================
# CONFIGURATION
# ======================================================

# MySQL server parameters
DB_HOST = "192.168.1.76"
DB_PORT = 3307
DB_USER = "root"
DB_PASS = "Vlado9674+"
DB_NAME = "fio"

# Where to save Excel files
OUTPUT_DIR = r"Z:\Dropbox\!!!Days\Downloads Z230"

# JSON file with list of accounts (name + account_number)
ACCOUNTS_JSON = r"C:\Users\vlado\PycharmProjects\FIO\accounts.json"

# Columns that MUST be written as TEXT in Excel using ="value"
# to avoid Excel stripping zeros or changing formatting
TEXT_COLUMNS = ["cislo_uctu", "protiucet", "kod_banky", "ks", "vs", "ss"]


# ======================================================
# REMOVE OLD EXPORT FILES
# ======================================================

def cleanup_old_exports():
    """
    Deletes older versions of exported XLSX files that match
    specific filename patterns. This keeps your folder clean,
    ensuring you only have the most recent export.
    """
    patterns = [
        os.path.join(OUTPUT_DIR, "*FIO*transaction*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO*transactions*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO_transactions*.xlsx"),
    ]

    # Check each pattern
    for pattern in patterns:
        for file in glob.glob(pattern):
            try:
                os.remove(file)
                print(f"🗑 Deleted old export: {file}")
            except:
                # If file cannot be deleted (locked or permission denied),
                # simply skip it.
                pass


# ======================================================
# CORE EXCEL FORMATTING FUNCTION
# ======================================================

def format_sheet(ws, rows, headers):
    """
    Applies ALL formatting rules to a worksheet:
      - Writes headers
      - Writes all rows
      - Converts selected columns to Excel text formulas
      - Colors rows based on "objem" (red=negative, green=positive)
      - Sets fixed column widths
      - Adds borders to every cell
      - Center-aligns first 10 columns
      - Freezes header row and enables filtering
    """

    # -------------------------------
    # 1) Format HEADER row
    # -------------------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)                                        # bold text
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")   # yellow background

    # -------------------------------
    # 2) Write DATA rows
    # -------------------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]

            # For text-sensitive columns, write ="value"
            # This prevents Excel from stripping zeros or treating them as numbers.
            if h in TEXT_COLUMNS and val is not None:
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)

        ws.append(excel_row)

    # -------------------------------
    # 3) Background coloring by "objem"
    # -------------------------------
    # Light red (ARGB) = negative
    fill_red = PatternFill(start_color="FFFFDDDD", end_color="FFFFDDDD", fill_type="solid")
    # Light green (ARGB) = positive or zero
    fill_green = PatternFill(start_color="FFEEFFEE", end_color="FFEEFFEE", fill_type="solid")

    # Find column index where "objem" is located
    objem_col_index = headers.index("objem") + 1

    # Apply row coloring
    for row_idx in range(2, len(rows) + 2):  # Start at row 2 (row 1 = header)
        cell_objem = ws.cell(row=row_idx, column=objem_col_index)

        # Convert objem to float
        try:
            value = float(cell_objem.value)
        except:
            value = 0

        # Choose correct color
        fill = fill_red if value < 0 else fill_green

        # Apply fill to entire row
        for col_idx in range(1, len(headers) + 1):
            ws.cell(row=row_idx, column=col_idx).fill = fill

    # -------------------------------
    # 4) Fixed column widths
    # -------------------------------
    fixed_widths = [
        6, 11, 11, 5, 14, 14, 8, 6, 13, 13,
        50, 53, 12, 12, 5, 49, 29, 5, 29, 16,
        15, 12, 49, 20
    ]

    # Apply width using A, B, C... column names
    for i, width in enumerate(fixed_widths, start=1):
        col_letter = chr(64 + i)  # convert 1 → 'A', 2 → 'B', ...
        ws.column_dimensions[col_letter].width = width

    # -------------------------------
    # 5) Add borders + alignment
    # -------------------------------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")

    total_rows = len(rows) + 1
    total_cols = len(headers)

    for row_idx in range(1, total_rows + 1):
        for col_idx in range(1, total_cols + 1):
            cell = ws.cell(row=row_idx, column=col_idx)
            cell.border = border  # add border

            # Center-align ONLY first 10 columns
            if col_idx <= 10:
                cell.alignment = align_center

    # Freeze header row so it stays visible while scrolling
    ws.freeze_panes = "A2"

    # Enable auto filter on top row
    ws.auto_filter.ref = ws.dimensions


# ======================================================
# MAIN EXPORT PROCESS
# ======================================================

def export_fio():
    print("Connecting to MySQL...")

    # Connect to MySQL database
    try:
        conn = mysql.connector.connect(
            host=DB_HOST,
            port=DB_PORT,
            user=DB_USER,
            password=DB_PASS,
            database=DB_NAME
        )
    except Error as e:
        print("❌ Failed to connect:", e)
        return

    cur = conn.cursor(dictionary=True)

    # -------------------------------
    # Load accounts.json
    # -------------------------------
    with open(ACCOUNTS_JSON, "r", encoding="utf-8") as f:
        accounts = json.load(f)

    # -------------------------------
    # Define priority first sheets
    # -------------------------------
    preferred_order = [
        "CZK rodina",
        "CZK ordinace",
        "CZK na jídlo",
        "CZK TrialHelp",
        "CZK maminka svojě věci"
    ]

    accounts_sorted = []

    # Step 1: add priority accounts first
    for pref in preferred_order:
        for acc in accounts:
            if acc["name"] == pref:
                accounts_sorted.append(acc)

    # Step 2: add remaining accounts afterward
    for acc in accounts:
        if acc not in accounts_sorted:
            accounts_sorted.append(acc)

    # -------------------------------
    # Create a new Excel workbook
    # -------------------------------
    wb = Workbook()
    wb.remove(wb.active)  # remove default empty sheet

    # -------------------------------
    # FIRST SHEET: ALL TRANSACTIONS
    # -------------------------------
    cur.execute("SELECT * FROM transactions ORDER BY datum DESC")
    all_rows = cur.fetchall()

    if all_rows:
        headers = list(all_rows[0].keys())
        ws_all = wb.create_sheet(title="ALL")
        ws_all.append(headers)
        format_sheet(ws_all, all_rows, headers)

    # -------------------------------
    # INDIVIDUAL SHEETS PER ACCOUNT
    # -------------------------------
    for acc in accounts_sorted:
        acc_num = acc["account_number"]
        sheet_name = acc["name"][:31]  # Excel sheet name limit

        print(f"➡ Creating sheet: {sheet_name}")

        query = f"""
            SELECT *
            FROM transactions
            WHERE cislo_uctu = '{acc_num}'
            ORDER BY datum DESC
        """

        cur.execute(query)
        rows = cur.fetchall()

        if not rows:
            print(f"⚠ No data for {sheet_name}")
            continue

        headers = list(rows[0].keys())
        ws = wb.create_sheet(title=sheet_name)
        ws.append(headers)

        format_sheet(ws, rows, headers)

    conn.close()

    # -------------------------------
    # Save Excel file
    # -------------------------------

    cleanup_old_exports()

    # File name includes timestamp
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    filename = f"{timestamp} FIO transactions.xlsx"
    output_file = os.path.join(OUTPUT_DIR, filename)

    wb.save(output_file)

    print(f"✅ Export complete:\n{output_file}")


# ======================================================
# MAIN ENTRY POINT
# ======================================================

if __name__ == "__main__":
    export_fio()
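chr(64 + i) covers the 24 fixed widths above (columns A through X) but would produce wrong letters past column Z. openpyxl ships a helper for exactly this; a small sketch of the same width loop using it, assuming the same fixed_widths list:

from openpyxl import Workbook
from openpyxl.utils import get_column_letter

ws = Workbook().active
fixed_widths = [6, 11, 11, 5, 14, 14, 8, 6, 13, 13,
                50, 53, 12, 12, 5, 49, 29, 5, 29, 16,
                15, 12, 49, 20]

# get_column_letter(27) returns "AA", so this also works for sheets wider than 26 columns.
for i, width in enumerate(fixed_widths, start=1):
    ws.column_dimensions[get_column_letter(i)].width = width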
5   2025-11-30 final reporter/40 test.py   Normal file

@@ -0,0 +1,5 @@
from Functions import SendWhatsAppMessage, get_dropbox_root

SendWhatsAppMessage("Ahoj Vlado, úloha dokončena!")

print(get_dropbox_root())
108   2025-11-30 final reporter/Functions.py   Normal file

@@ -0,0 +1,108 @@
# Functions.py
import os
import time
import requests
from dotenv import load_dotenv

# Load .env variables once
load_dotenv()

WHATSAPP_TOKEN = os.getenv("WHATSAPP_TOKEN")
WHATSAPP_PHONE_ID = os.getenv("WHATSAPP_PHONE_NUMBER_ID")
WHATSAPP_RECIPIENT = os.getenv("WHATSAPP_RECIPIENT_NUMBER")

WAPI_URL = f"https://graph.facebook.com/v21.0/{WHATSAPP_PHONE_ID}/messages"


def SendWhatsAppMessage(message: str, retries: int = 3, delay: int = 2) -> bool:
    """
    Sends a WhatsApp message using the WhatsApp Cloud API test number.
    Automatically retries on failure.

    :param message: Text to send.
    :param retries: Number of retry attempts.
    :param delay: Delay between retries (seconds).
    :return: True if the message was sent successfully, False otherwise.
    """

    # --- safety check: missing config ---
    if not WHATSAPP_TOKEN or not WHATSAPP_PHONE_ID or not WHATSAPP_RECIPIENT:
        print("❌ WhatsApp API configuration missing in .env")
        return False

    headers = {
        "Authorization": f"Bearer {WHATSAPP_TOKEN}",
        "Content-Type": "application/json"
    }

    payload = {
        "messaging_product": "whatsapp",
        "to": WHATSAPP_RECIPIENT,
        "type": "text",
        "text": {"body": message}
    }

    # --- retry loop ---
    for attempt in range(1, retries + 1):
        try:
            response = requests.post(WAPI_URL, headers=headers, json=payload, timeout=15)
            status = response.status_code

            if status == 200:
                print(f"📨 WhatsApp message sent successfully (attempt {attempt})")
                return True

            else:
                print(f"⚠️ WhatsApp API error (attempt {attempt}): {status} {response.text}")

        except requests.RequestException as e:
            print(f"⚠️ Network error (attempt {attempt}): {e}")

        time.sleep(delay)

    print("❌ Failed to send WhatsApp message after retries.")
    return False


# -----------------------------------------------------
# Find Dropbox root by reading official info.json
# -----------------------------------------------------

def get_dropbox_root() -> str | None:
    # """
    # Returns the absolute Dropbox folder path by reading:
    #     C:\Users\<user>\AppData\Local\Dropbox\info.json
    # This is reliable even if Dropbox changes its drive letter.
    # """

    import os
    import json

    localapp = os.environ.get("LOCALAPPDATA")
    if not localapp:
        print("⚠️ LOCALAPPDATA not found.")
        return None

    info_path = os.path.join(localapp, "Dropbox", "info.json")

    if not os.path.exists(info_path):
        print(f"⚠️ Dropbox info.json not found at: {info_path}")
        return None

    try:
        with open(info_path, "r", encoding="utf-8") as f:
            data = json.load(f)

        # Most users: `personal`
        if "personal" in data and "path" in data["personal"]:
            return data["personal"]["path"]

        # Business Dropbox if used
        if "business" in data and "path" in data["business"]:
            return data["business"]["path"]

        print("⚠️ Dropbox info.json missing 'path' in personal/business")
        return None

    except Exception as e:
        print(f"⚠️ Error reading Dropbox info.json: {e}")
        return None
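A minimal usage sketch of the two helpers above, along the lines of 40 test.py (the backup folder name is only an illustration, not something the repo defines):

import os
from Functions import SendWhatsAppMessage, get_dropbox_root

dropbox = get_dropbox_root()  # absolute Dropbox path, or None if info.json is not found
if dropbox:
    backup_dir = os.path.join(dropbox, "FIO-backups")  # hypothetical folder name
    print("Backups would go under:", backup_dir)

ok = SendWhatsAppMessage("Test z Functions.py")  # returns True/False instead of raising
print("Sent:", ok)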
92   2025-11-30 final reporter/RunFIOreport.py   Normal file

@@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys
import subprocess
import time
from datetime import datetime

import io

# Force UTF-8 output for Scheduled Tasks
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')


BASE_DIR = os.path.dirname(os.path.abspath(__file__))

SCRIPT1 = os.path.join(BASE_DIR, "21ReadJSONmultipleaccounts.py")
SCRIPT2 = os.path.join(BASE_DIR, "30Report.py")
LOG_DIR = os.path.join(BASE_DIR, "logs")
LOG_FILE = os.path.join(LOG_DIR, "FIOreport.log")

os.makedirs(LOG_DIR, exist_ok=True)

# Optional WhatsApp notify
try:
    from Functions import SendWhatsAppMessage
    WHATSAPP_AVAILABLE = True
except Exception:
    WHATSAPP_AVAILABLE = False


def write_log(text):
    with open(LOG_FILE, "a", encoding="utf-8") as f:
        f.write(text + "\n")
    print(text)


def run_script(path):
    write_log(f"\n[{datetime.now()}] ➡ Running: {os.path.basename(path)}")

    if not os.path.isfile(path):
        write_log(f"❌ Script not found: {path}")
        return False

    process = subprocess.Popen(
        [sys.executable, path],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
        encoding="utf-8",
        errors="replace"
    )

    # Log STDOUT live
    for line in process.stdout:
        write_log(line.rstrip())

    # Log STDERR live
    for line in process.stderr:
        write_log("⚠️ " + line.rstrip())

    process.wait()
    return process.returncode == 0


# ----------------------------------------------------------
# MAIN
# ----------------------------------------------------------
if __name__ == "__main__":
    write_log("\n====================== NEW RUN ======================")

    ok1 = run_script(SCRIPT1)
    ok2 = False

    if ok1:
        write_log("✔ Stage 1 OK")
        time.sleep(1)
        ok2 = run_script(SCRIPT2)

    if ok1 and ok2:
        write_log("✔ All stages completed successfully")
        if WHATSAPP_AVAILABLE:
            SendWhatsAppMessage("✔ FIO import + report hotový.")
    else:
        write_log("❌ SOME PART FAILED — check above for errors")
        if WHATSAPP_AVAILABLE:
            SendWhatsAppMessage("❌ FIO proces selhal. Zkontroluj log.")

    write_log("======================== END ========================\n")
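Everything the two stages print is appended to logs/FIOreport.log. A small sketch for pulling just the most recent run back out of that file, assuming the NEW RUN separator written by write_log above:

from pathlib import Path

LOG_FILE = Path(__file__).parent / "logs" / "FIOreport.log"

def last_run(log_path: Path = LOG_FILE) -> str:
    """Return the text of the most recent NEW RUN block in the log."""
    text = log_path.read_text(encoding="utf-8")
    runs = text.split("====================== NEW RUN ======================")
    return runs[-1].strip()

if __name__ == "__main__":
    print(last_run())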
28   2025-11-30 final reporter/Whatsapptestsendin.py   Normal file

@@ -0,0 +1,28 @@
import os
import requests
from dotenv import load_dotenv

load_dotenv()

TOKEN = os.getenv("WHATSAPP_TOKEN")
PHONE_NUMBER_ID = os.getenv("WHATSAPP_PHONE_NUMBER_ID")
RECIPIENT = os.getenv("WHATSAPP_RECIPIENT_NUMBER")

def send_whatsapp_message(text):
    url = f"https://graph.facebook.com/v22.0/{PHONE_NUMBER_ID}/messages"
    headers = {
        "Authorization": f"Bearer {TOKEN}",
        "Content-Type": "application/json"
    }
    data = {
        "messaging_product": "whatsapp",
        "to": RECIPIENT,
        "type": "text",
        "text": {"body": text}
    }

    r = requests.post(url, headers=headers, json=data)
    print("Status:", r.status_code)
    print("Response:", r.text)

send_whatsapp_message("Ahoj Vlado! Test zpráva přes WhatsApp API 🔔")
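This test script calls Graph API v22.0 while Functions.py uses v21.0. If both are meant to hit the same endpoint, keeping the version in one shared constant avoids drift; a sketch under that assumption (whatsapp_config.py is a hypothetical module, not part of this commit):

# whatsapp_config.py (hypothetical shared module)
GRAPH_API_VERSION = "v21.0"  # single place to bump the Cloud API version

def messages_url(phone_number_id: str) -> str:
    return f"https://graph.facebook.com/{GRAPH_API_VERSION}/{phone_number_id}/messages"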
53   2025-11-30 final reporter/accounts.json   Normal file

@@ -0,0 +1,53 @@
[
  {
    "name": "EUR tatínek 1",
    "account_number": "2100074583",
    "token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
  },
  {
    "name": "CZK rodina",
    "account_number": "2100046291",
    "token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
  },
  {
    "name": "EUR TrialHelp",
    "account_number": "2200787265",
    "token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
  },
  {
    "name": "CZK tatínek",
    "account_number": "2400046293",
    "token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
  },
  {
    "name": "CHF tatínek",
    "account_number": "2402161017",
    "token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
  },
  {
    "name": "EUR tatínek 2",
    "account_number": "2500074582",
    "token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
  },
  {
    "name": "CZK TrialHelp",
    "account_number": "2900046548",
    "token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
  },
  {
    "name": "CZK maminka svojě věci",
    "account_number": "2003310572",
    "token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
  },
  {
    "name": "CZK na jídlo",
    "account_number": "2403310563",
    "token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
  },
  {
    "name": "CZK ordinace",
    "account_number": "2800046620",
    "token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
  }
]
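Each entry above follows the name / account_number / token shape that load_accounts() in 21ReadJSONmultipleaccounts.py validates. A quick self-contained check of this file (path copied from the importer's configuration):

import json

ACCOUNTS_FILE = r"c:\users\vlado\PycharmProjects\FIO\accounts.json"

with open(ACCOUNTS_FILE, "r", encoding="utf-8") as f:
    accounts = json.load(f)

problems = 0
for acc in accounts:
    for key in ("name", "account_number", "token"):
        if key not in acc:
            print(f"Missing '{key}' in: {acc}")
            problems += 1

print(f"{len(accounts)} accounts checked, {problems} problem(s) found")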
164   2025-11-30 final reporter/logs/FIOreport.log   Normal file

@@ -0,0 +1,164 @@

====================== NEW RUN ======================

[2025-11-30 19:25:09.332782] ➡ Running: 21ReadJSONmultipleaccounts.py
=== Fio multi-account import ===
Období: 2025-09-01 až 2025-11-30
Načítám účty z JSON konfigurace...
 Účtů v konfiguraci: 10

--- Účet: EUR tatínek 1 (2100074583) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100074583\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 2
 ✓ Zapsáno (insert/update): 2 řádků do DB za 0.27 s

--- Účet: CZK rodina (2100046291) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100046291\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 307
 ✓ Zapsáno (insert/update): 307 řádků do DB za 0.37 s

--- Účet: EUR TrialHelp (2200787265) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2200787265\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 2
 ✓ Zapsáno (insert/update): 2 řádků do DB za 0.22 s

--- Účet: CZK tatínek (2400046293) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2400046293\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: CHF tatínek (2402161017) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2402161017\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: EUR tatínek 2 (2500074582) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2500074582\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: CZK TrialHelp (2900046548) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2900046548\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 6
 ✓ Zapsáno (insert/update): 6 řádků do DB za 0.23 s

--- Účet: CZK maminka svojě věci (2003310572) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2003310572\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 75
 ✓ Zapsáno (insert/update): 75 řádků do DB za 0.27 s

--- Účet: CZK na jídlo (2403310563) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2403310563\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 119
 ✓ Zapsáno (insert/update): 119 řádků do DB za 0.38 s

--- Účet: CZK ordinace (2800046620) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2800046620\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 126
 ✓ Zapsáno (insert/update): 126 řádků do DB za 0.31 s

=== Hotovo. Celkem zapsáno 637 transakcí. Celkový čas: 2.94 s ===
⚠️ Could not find platform independent libraries <prefix>
✔ Stage 1 OK

[2025-11-30 19:25:13.462554] ➡ Running: 30Report.py
Connecting to MySQL...
➡ Creating sheet: CZK rodina
➡ Creating sheet: CZK ordinace
➡ Creating sheet: CZK na jídlo
➡ Creating sheet: CZK TrialHelp
➡ Creating sheet: CZK maminka svojě věci
➡ Creating sheet: EUR tatínek 1
➡ Creating sheet: EUR TrialHelp
➡ Creating sheet: CZK tatínek
➡ Creating sheet: CHF tatínek
➡ Creating sheet: EUR tatínek 2
🗑 Deleted old export: Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-22-48 FIO transactions.xlsx
✅ Export complete:
Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-25-54 FIO transactions.xlsx
⚠️ Could not find platform independent libraries <prefix>
✔ All stages completed successfully
======================== END ========================


====================== NEW RUN ======================

[2025-11-30 19:30:19.846254] ➡ Running: 21ReadJSONmultipleaccounts.py
=== Fio multi-account import ===
Období: 2025-09-01 až 2025-11-30
Načítám účty z JSON konfigurace...
 Účtů v konfiguraci: 10

--- Účet: EUR tatínek 1 (2100074583) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100074583\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 2
 ✓ Zapsáno (insert/update): 2 řádků do DB za 0.26 s

--- Účet: CZK rodina (2100046291) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2100046291\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 307
 ✓ Zapsáno (insert/update): 307 řádků do DB za 0.37 s

--- Účet: EUR TrialHelp (2200787265) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2200787265\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 2
 ✓ Zapsáno (insert/update): 2 řádků do DB za 0.23 s

--- Účet: CZK tatínek (2400046293) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2400046293\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: CHF tatínek (2402161017) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2402161017\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: EUR tatínek 2 (2500074582) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2500074582\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 0
 Žádné transakce, jdu dál.

--- Účet: CZK TrialHelp (2900046548) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2900046548\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 6
 ✓ Zapsáno (insert/update): 6 řádků do DB za 0.31 s

--- Účet: CZK maminka svojě věci (2003310572) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2003310572\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 75
 ✓ Zapsáno (insert/update): 75 řádků do DB za 0.37 s

--- Účet: CZK na jídlo (2403310563) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2403310563\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 119
 ✓ Zapsáno (insert/update): 119 řádků do DB za 0.30 s

--- Účet: CZK ordinace (2800046620) ---
 JSON uložen do: z:\Dropbox\!!!Days\Downloads Z230\Fio\2800046620\2025-09-01_to_2025-11-30.json
 Počet transakcí v období: 126
 ✓ Zapsáno (insert/update): 126 řádků do DB za 0.30 s

=== Hotovo. Celkem zapsáno 637 transakcí. Celkový čas: 2.81 s ===
⚠️ Could not find platform independent libraries <prefix>
✔ Stage 1 OK

[2025-11-30 19:30:23.822641] ➡ Running: 30Report.py
Connecting to MySQL...
➡ Creating sheet: CZK rodina
➡ Creating sheet: CZK ordinace
➡ Creating sheet: CZK na jídlo
➡ Creating sheet: CZK TrialHelp
➡ Creating sheet: CZK maminka svojě věci
➡ Creating sheet: EUR tatínek 1
➡ Creating sheet: EUR TrialHelp
➡ Creating sheet: CZK tatínek
➡ Creating sheet: CHF tatínek
➡ Creating sheet: EUR tatínek 2
🗑 Deleted old export: Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-25-54 FIO transactions.xlsx
✅ Export complete:
Z:\Dropbox\!!!Days\Downloads Z230\2025-11-30 19-31-02 FIO transactions.xlsx
⚠️ Could not find platform independent libraries <prefix>
✔ All stages completed successfully
======================== END ========================
@@ -1,42 +1,79 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
FIO MULTI–ACCOUNT IMPORTER — FULLY COMMENTED VERSION
|
||||||
|
====================================================
|
||||||
|
|
||||||
|
This script downloads transactions for **multiple Fio bank accounts**
|
||||||
|
(using their API tokens) and imports them into a MySQL database
|
||||||
|
(`fio.transactions` table).
|
||||||
|
|
||||||
|
It also saves the raw JSON responses into a folder structure
|
||||||
|
for backup / debugging / later use.
|
||||||
|
|
||||||
|
Main features:
|
||||||
|
• Reads all accounts from accounts.json
|
||||||
|
• Downloads last N days (default 90)
|
||||||
|
• Saves JSON files to disk
|
||||||
|
• Extracts all transactions with safe parsing
|
||||||
|
• Inserts into MySQL with ON DUPLICATE KEY UPDATE
|
||||||
|
• Efficient batch insertion (executemany)
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import time
|
import time
|
||||||
from datetime import date, timedelta
|
from datetime import date, timedelta
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import requests
|
import requests # used to call Fio REST API
|
||||||
import pymysql
|
import pymysql # MySQL driver
|
||||||
|
|
||||||
|
|
||||||
# =========================================
|
# =========================================
|
||||||
# CONFIG
|
# CONFIGURATION
|
||||||
# =========================================
|
# =========================================
|
||||||
ACCOUNTS_FILE = r"u:\PycharmProjects\FIO\accounts.json"
|
|
||||||
JSON_BASE_DIR = r"u:\Dropbox\!!!Days\Downloads Z230\Fio" # kam se budou ukládat JSONy
|
|
||||||
|
|
||||||
|
# JSON file containing multiple account configs:
|
||||||
|
# [
|
||||||
|
# { "name": "CZK rodina", "account_number": "2100046291", "token": "xxx" },
|
||||||
|
# ...
|
||||||
|
# ]
|
||||||
|
ACCOUNTS_FILE = r"c:\users\vlado\PycharmProjects\FIO\accounts.json"
|
||||||
|
|
||||||
|
# Directory where raw JSON files from Fio API will be stored.
|
||||||
|
JSON_BASE_DIR = r"z:\Dropbox\!!!Days\Downloads Z230\Fio"
|
||||||
|
|
||||||
|
# MySQL connection parameters
|
||||||
DB = {
|
DB = {
|
||||||
"host": "192.168.1.76",
|
"host": "192.168.1.76",
|
||||||
"port": 3307,
|
"port": 3307,
|
||||||
"user": "root",
|
"user": "root",
|
||||||
"password": "Vlado9674+", # uprav podle sebe / dej do .env
|
"password": "Vlado9674+",
|
||||||
"database": "fio",
|
"database": "fio",
|
||||||
"charset": "utf8mb4",
|
"charset": "utf8mb4",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# How many transactions insert per batch (performance tuning)
|
||||||
BATCH_SIZE = 500
|
BATCH_SIZE = 500
|
||||||
|
|
||||||
|
# How many days back we load from Fio (default = last 90 days)
|
||||||
DAYS_BACK = 90
|
DAYS_BACK = 90
|
||||||
|
|
||||||
|
|
||||||
# =========================================
|
# =========================================
|
||||||
# HELPERS
|
# HELPERS
|
||||||
# =========================================
|
# =========================================
|
||||||
|
|
||||||
def load_accounts(path: str):
|
def load_accounts(path: str):
|
||||||
|
"""
|
||||||
|
Reads accounts.json and does simple validation to ensure
|
||||||
|
each entry contains: name, account_number, token.
|
||||||
|
"""
|
||||||
with open(path, "r", encoding="utf-8") as f:
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
accounts = json.load(f)
|
accounts = json.load(f)
|
||||||
|
|
||||||
# jednoduchá validace
|
|
||||||
for acc in accounts:
|
for acc in accounts:
|
||||||
for key in ("name", "account_number", "token"):
|
for key in ("name", "account_number", "token"):
|
||||||
if key not in acc:
|
if key not in acc:
|
||||||
@@ -46,17 +83,28 @@ def load_accounts(path: str):
|
|||||||
|
|
||||||
|
|
||||||
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
|
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
|
||||||
|
"""
|
||||||
|
Constructs the exact URL for Fio REST API "periods" endpoint.
|
||||||
|
Example:
|
||||||
|
https://fioapi.fio.cz/v1/rest/periods/<token>/2025-01-01/2025-01-31/transactions.json
|
||||||
|
"""
|
||||||
from_str = d_from.strftime("%Y-%m-%d")
|
from_str = d_from.strftime("%Y-%m-%d")
|
||||||
to_str = d_to.strftime("%Y-%m-%d")
|
to_str = d_to.strftime("%Y-%m-%d")
|
||||||
return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{from_str}/{to_str}/transactions.json"
|
return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{from_str}/{to_str}/transactions.json"
|
||||||
|
|
||||||
|
|
||||||
def fetch_fio_json(token: str, d_from: date, d_to: date):
|
def fetch_fio_json(token: str, d_from: date, d_to: date):
|
||||||
|
"""
|
||||||
|
Calls Fio API and fetches JSON.
|
||||||
|
Handles HTTP errors and JSON decoding errors.
|
||||||
|
"""
|
||||||
url = fio_url_for_period(token, d_from, d_to)
|
url = fio_url_for_period(token, d_from, d_to)
|
||||||
resp = requests.get(url, timeout=30)
|
resp = requests.get(url, timeout=30)
|
||||||
|
|
||||||
if resp.status_code != 200:
|
if resp.status_code != 200:
|
||||||
print(f" ❌ HTTP {resp.status_code} from Fio: {url}")
|
print(f" ❌ HTTP {resp.status_code} from Fio: {url}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return resp.json()
|
return resp.json()
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
@@ -66,11 +114,16 @@ def fetch_fio_json(token: str, d_from: date, d_to: date):
|
|||||||
|
|
||||||
def safe_col(t: dict, n: int):
|
def safe_col(t: dict, n: int):
|
||||||
"""
|
"""
|
||||||
Safely read t['columnN']['value'], i.e. Fio column.
|
SAFE ACCESSOR for Fio transaction column numbers.
|
||||||
Handles:
|
|
||||||
- missing columnN
|
Fio JSON schema example:
|
||||||
- columnN is None
|
"column5": { "name": "VS", "value": "123456" }
|
||||||
- missing 'value'
|
|
||||||
|
But the structure is NOT guaranteed to exist.
|
||||||
|
So this function prevents KeyError or NoneType errors.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
t["columnN"]["value"] or None
|
||||||
"""
|
"""
|
||||||
key = f"column{n}"
|
key = f"column{n}"
|
||||||
val = t.get(key)
|
val = t.get(key)
|
||||||
@@ -81,8 +134,8 @@ def safe_col(t: dict, n: int):
|
|||||||
|
|
||||||
def clean_date(dt_str: str):
|
def clean_date(dt_str: str):
|
||||||
"""
|
"""
|
||||||
Convert Fio date '2025-10-26+0200' -> '2025-10-26'
|
Fio returns dates like: "2025-02-14+0100"
|
||||||
Fio spec: date is always rrrr-mm-dd+GMT.
|
We strip timezone → "2025-02-14"
|
||||||
"""
|
"""
|
||||||
if not dt_str:
|
if not dt_str:
|
||||||
return None
|
return None
|
||||||
@@ -90,15 +143,19 @@ def clean_date(dt_str: str):
|
|||||||
|
|
||||||
|
|
||||||
def ensure_dir(path: Path):
|
def ensure_dir(path: Path):
|
||||||
|
"""Creates directory if it doesn’t exist."""
|
||||||
path.mkdir(parents=True, exist_ok=True)
|
path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date):
|
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date):
|
||||||
"""
|
"""
|
||||||
Uloží JSON do podsložky dle čísla účtu, název souboru podle období.
|
Saves raw JSON to:
|
||||||
|
<base_dir>/<account_number>/YYYY-MM-DD_to_YYYY-MM-DD.json
|
||||||
|
|
||||||
|
Useful for debugging, backups, or re-imports.
|
||||||
"""
|
"""
|
||||||
acc_num_raw = account_cfg["account_number"]
|
acc_num_raw = account_cfg["account_number"]
|
||||||
acc_folder_name = acc_num_raw.replace("/", "_") # 2101234567_2700
|
acc_folder_name = acc_num_raw.replace("/", "_") # sanitize dir name for filesystem
|
||||||
|
|
||||||
out_dir = Path(base_dir) / acc_folder_name
|
out_dir = Path(base_dir) / acc_folder_name
|
||||||
ensure_dir(out_dir)
|
ensure_dir(out_dir)
|
||||||
@@ -113,28 +170,31 @@ def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from:
|
|||||||
|
|
||||||
|
|
||||||
# =========================================
|
# =========================================
|
||||||
# MAIN IMPORT
|
# MAIN IMPORT LOGIC
|
||||||
# =========================================
|
# =========================================
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
start_all = time.time()
|
start_all = time.time()
|
||||||
|
|
||||||
# období posledních 90 dní
|
# Calculate time range (last N days)
|
||||||
today = date.today()
|
today = date.today()
|
||||||
d_from = today - timedelta(days=DAYS_BACK)
|
d_from = today - timedelta(days=DAYS_BACK)
|
||||||
d_to = today
|
d_to = today
|
||||||
|
|
||||||
print(f"=== Fio multi-account import ===")
|
print("=== Fio multi-account import ===")
|
||||||
print(f"Období: {d_from} až {d_to}")
|
print(f"Období: {d_from} až {d_to}")
|
||||||
print("Načítám účty z JSON konfigurace...")
|
print("Načítám účty z JSON konfigurace...")
|
||||||
|
|
||||||
|
# Load all accounts from accounts.json
|
||||||
accounts = load_accounts(ACCOUNTS_FILE)
|
accounts = load_accounts(ACCOUNTS_FILE)
|
||||||
print(f" Účtů v konfiguraci: {len(accounts)}\n")
|
print(f" Účtů v konfiguraci: {len(accounts)}\n")
|
||||||
|
|
||||||
# Připojení do DB
|
# Connect to database
|
||||||
conn = pymysql.connect(**DB)
|
conn = pymysql.connect(**DB)
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
|
||||||
# SQL s ON DUPLICATE KEY UPDATE
|
# SQL INSERT with ON DUPLICATE KEY UPDATE
|
||||||
|
# This means: if transaction already exists (same unique key), update it.
|
||||||
sql = """
|
sql = """
|
||||||
INSERT INTO transactions
|
INSERT INTO transactions
|
||||||
(
|
(
|
||||||
@@ -174,6 +234,9 @@ def main():
|
|||||||
|
|
||||||
total_inserted = 0
|
total_inserted = 0
|
||||||
|
|
||||||
|
# ======================================================
|
||||||
|
# PROCESS EACH ACCOUNT IN accounts.json
|
||||||
|
# ======================================================
|
||||||
for acc in accounts:
|
for acc in accounts:
|
||||||
name = acc["name"]
|
name = acc["name"]
|
||||||
cfg_acc_num = acc["account_number"]
|
cfg_acc_num = acc["account_number"]
|
||||||
@@ -182,17 +245,20 @@ def main():
        print(f"--- Účet: {name} ({cfg_acc_num}) ---")
        t0 = time.time()

        # --- 1) Download JSON from Fio API
        data = fetch_fio_json(token, d_from, d_to)
        if data is None:
            print(" Přeskakuji, žádná data / chyba API.\n")
            continue

        # --- 2) Save raw JSON file to disk
        json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
        print(f" JSON uložen do: {json_path}")

        # --- 3) Extract transactions from JSON tree
        tlist = data["accountStatement"]["transactionList"].get("transaction", [])

        # FIO can return single transaction as an object (not list)
        if isinstance(tlist, dict):
            tlist = [tlist]

@@ -202,13 +268,15 @@ def main():
            print(" Žádné transakce, jdu dál.\n")
            continue

        # FIO returns account ID under accountStatement.info.accountId
        fio_acc_id = data["accountStatement"]["info"]["accountId"]

        # Warn if account ID in JSON doesn't match config (informational only)
        if cfg_acc_num and cfg_acc_num.split("/")[0] not in fio_acc_id:
            print(f" ⚠ Upozornění: accountId z Fio ({fio_acc_id}) "
                  f"se neshoduje s account_number v konfiguraci ({cfg_acc_num})")

        # --- 4) Build list of MySQL rows
        rows = []
        for t in tlist:
            row = {

@@ -245,25 +313,33 @@ def main():
            }
            rows.append(row)

        # --- 5) INSERT rows into MySQL in batches
        inserted = 0
        for i in range(0, len(rows), BATCH_SIZE):
            chunk = rows[i : i + BATCH_SIZE]
            cur.executemany(sql, chunk)  # fast multi-row insert/update
            conn.commit()
            inserted += len(chunk)
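            # Each chunk is committed right away, so a failure mid-run rolls
            # back only the current chunk; thanks to ON DUPLICATE KEY UPDATE,
            # a re-run simply re-upserts the same rows.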

        elapsed = time.time() - t0
        total_inserted += inserted

        print(f" ✓ Zapsáno (insert/update): {inserted} řádků do DB za {elapsed:.2f} s\n")

    # Close DB
    cur.close()
    conn.close()

    total_elapsed = time.time() - start_all

    print(f"=== Hotovo. Celkem zapsáno {total_inserted} transakcí. "
          f"Celkový čas: {total_elapsed:.2f} s ===")


# ======================================================
# ENTRY POINT
# ======================================================

if __name__ == "__main__":
    main()
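
# For reference, each entry in accounts.json is expected to carry at least the
# fields read above ("name", "account_number") plus the per-account Fio API
# token. A minimal illustrative entry (values and the token key name are
# assumptions, not taken from this commit):
#
#   [
#     {
#       "name": "CZK rodina",
#       "account_number": "2101234567/2010",
#       "token": "<Fio API token>"
#     }
#   ]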
313
30 Report.py
Normal file
@@ -0,0 +1,313 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
FIO EXPORT SCRIPT — FULLY COMMENTED VERSION
-------------------------------------------

This script connects to your MySQL "fio" database, reads all transactions,
and exports them into a highly formatted Excel workbook.

The Excel file includes:

• First sheet: "ALL" → contains ALL transactions
• Additional sheets: one for each account from accounts.json
• The five preferred CZK sheets appear first, in a custom order
• Consistent formatting on every sheet (colors, borders, widths, formulas)

Everything is generated automatically.
"""

import mysql.connector
from mysql.connector import Error
from openpyxl import Workbook
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from datetime import datetime
import os
import glob
import json

# ======================================================
# CONFIGURATION
# ======================================================

# MySQL server parameters
DB_HOST = "192.168.1.76"
DB_PORT = 3307
DB_USER = "root"
DB_PASS = "Vlado9674+"
DB_NAME = "fio"

# Where to save Excel files
OUTPUT_DIR = r"Z:\Dropbox\!!!Days\Downloads Z230"

# JSON file with list of accounts (name + account_number)
ACCOUNTS_JSON = r"C:\Users\vlado\PycharmProjects\FIO\accounts.json"

# Columns that MUST be written as TEXT in Excel using ="value"
# to avoid Excel stripping zeros or changing formatting
TEXT_COLUMNS = ["cislo_uctu", "protiucet", "kod_banky", "ks", "vs", "ss"]
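# For example, a variable symbol stored as "0042" would otherwise be shown by
# Excel as the number 42; writing the cell as ="0042" (see format_sheet below)
# keeps the leading zero and the exact text. ("0042" is an illustrative value.)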


# ======================================================
# REMOVE OLD EXPORT FILES
# ======================================================

def cleanup_old_exports():
    """
    Deletes older versions of exported XLSX files that match
    specific filename patterns. This keeps your folder clean,
    ensuring you only have the most recent export.
    """
    patterns = [
        os.path.join(OUTPUT_DIR, "*FIO*transaction*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO*transactions*.xlsx"),
        os.path.join(OUTPUT_DIR, "*FIO_transactions*.xlsx"),
    ]

    # Check each pattern
    for pattern in patterns:
        for file in glob.glob(pattern):
            try:
                os.remove(file)
                print(f"🗑 Deleted old export: {file}")
            except OSError:
                # If the file cannot be deleted (locked or permission denied),
                # simply skip it.
                pass


# ======================================================
# CORE EXCEL FORMATTING FUNCTION
# ======================================================

def format_sheet(ws, rows, headers):
    """
    Applies ALL formatting rules to a worksheet:
    - Styles the header row (written by the caller)
    - Writes all data rows
    - Converts selected columns to Excel text formulas
    - Colors rows based on "objem" (red = negative, green = positive)
    - Sets fixed column widths
    - Adds borders to every cell
    - Center-aligns the first 10 columns
    - Freezes the header row and enables filtering
    """

    # -------------------------------
    # 1) Format HEADER row
    # -------------------------------
    for col_idx in range(1, len(headers) + 1):
        cell = ws.cell(row=1, column=col_idx)
        cell.font = Font(bold=True)  # bold text
        cell.fill = PatternFill(start_color="FFFF00", fill_type="solid")  # yellow background

    # -------------------------------
    # 2) Write DATA rows
    # -------------------------------
    for row in rows:
        excel_row = []
        for h in headers:
            val = row[h]

            # For text-sensitive columns, write ="value".
            # This prevents Excel from stripping zeros or treating them as numbers.
            if h in TEXT_COLUMNS and val is not None:
                excel_row.append(f'="{val}"')
            else:
                excel_row.append(val)

        ws.append(excel_row)

    # -------------------------------
    # 3) Background coloring by "objem"
    # -------------------------------
    # Light red (ARGB) = negative
    fill_red = PatternFill(start_color="FFFFDDDD", end_color="FFFFDDDD", fill_type="solid")
    # Light green (ARGB) = positive or zero
    fill_green = PatternFill(start_color="FFEEFFEE", end_color="FFEEFFEE", fill_type="solid")

    # Find column index where "objem" is located
    objem_col_index = headers.index("objem") + 1

    # Apply row coloring
    for row_idx in range(2, len(rows) + 2):  # Start at row 2 (row 1 = header)
        cell_objem = ws.cell(row=row_idx, column=objem_col_index)

        # Convert objem to float
        try:
            value = float(cell_objem.value)
        except (TypeError, ValueError):
            value = 0

        # Choose correct color
        fill = fill_red if value < 0 else fill_green

        # Apply fill to entire row
        for col_idx in range(1, len(headers) + 1):
            ws.cell(row=row_idx, column=col_idx).fill = fill

    # -------------------------------
    # 4) Fixed column widths
    # -------------------------------
    fixed_widths = [
        6, 11, 11, 5, 14, 14, 8, 6, 13, 13,
        50, 53, 12, 12, 5, 49, 29, 5, 29, 16,
        15, 12, 49, 20
    ]

    # Apply width using A, B, C... column names
    for i, width in enumerate(fixed_widths, start=1):
        col_letter = chr(64 + i)  # convert 1 → 'A', 2 → 'B', ...
        ws.column_dimensions[col_letter].width = width
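    # Note: chr(64 + i) only yields single letters A–Z, which covers the 24
    # fixed widths above; for sheets wider than 26 columns, openpyxl's
    # get_column_letter(i) would be the safer choice.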

    # -------------------------------
    # 5) Add borders + alignment
    # -------------------------------
    thin = Side(border_style="thin", color="000000")
    border = Border(left=thin, right=thin, top=thin, bottom=thin)
    align_center = Alignment(horizontal="center")

    total_rows = len(rows) + 1
    total_cols = len(headers)

    for row_idx in range(1, total_rows + 1):
        for col_idx in range(1, total_cols + 1):
            cell = ws.cell(row=row_idx, column=col_idx)
            cell.border = border  # add border

            # Center-align ONLY the first 10 columns
            if col_idx <= 10:
                cell.alignment = align_center

    # Freeze header row so it stays visible while scrolling
    ws.freeze_panes = "A2"

    # Enable auto filter on top row
    ws.auto_filter.ref = ws.dimensions


# ======================================================
# MAIN EXPORT PROCESS
# ======================================================

def export_fio():
    print("Connecting to MySQL...")

    # Connect to MySQL database
    try:
        conn = mysql.connector.connect(
            host=DB_HOST,
            port=DB_PORT,
            user=DB_USER,
            password=DB_PASS,
            database=DB_NAME
        )
    except Error as e:
        print("❌ Failed to connect:", e)
        return

    cur = conn.cursor(dictionary=True)

    # -------------------------------
    # Load accounts.json
    # -------------------------------
    with open(ACCOUNTS_JSON, "r", encoding="utf-8") as f:
        accounts = json.load(f)

    # -------------------------------
    # Define priority first sheets
    # -------------------------------
    preferred_order = [
        "CZK rodina",
        "CZK ordinace",
        "CZK na jídlo",
        "CZK TrialHelp",
        "CZK maminka svojě věci"
    ]

    accounts_sorted = []

    # Step 1: add priority accounts first
    for pref in preferred_order:
        for acc in accounts:
            if acc["name"] == pref:
                accounts_sorted.append(acc)

    # Step 2: add remaining accounts afterward
    for acc in accounts:
        if acc not in accounts_sorted:
            accounts_sorted.append(acc)

    # -------------------------------
    # Create a new Excel workbook
    # -------------------------------
    wb = Workbook()
    wb.remove(wb.active)  # remove default empty sheet

    # -------------------------------
    # FIRST SHEET: ALL TRANSACTIONS
    # -------------------------------
    cur.execute("SELECT * FROM transactions ORDER BY datum DESC")
    all_rows = cur.fetchall()

    if all_rows:
        headers = list(all_rows[0].keys())
        ws_all = wb.create_sheet(title="ALL")
        ws_all.append(headers)
        format_sheet(ws_all, all_rows, headers)

    # -------------------------------
    # INDIVIDUAL SHEETS PER ACCOUNT
    # -------------------------------
    for acc in accounts_sorted:
        acc_num = acc["account_number"]
        sheet_name = acc["name"][:31]  # Excel sheet name limit

        print(f"➡ Creating sheet: {sheet_name}")

        # Parameterized query (avoids quoting/injection issues with acc_num)
        query = """
            SELECT *
            FROM transactions
            WHERE cislo_uctu = %s
            ORDER BY datum DESC
        """
        cur.execute(query, (acc_num,))
        rows = cur.fetchall()

        if not rows:
            print(f"⚠ No data for {sheet_name}")
            continue

        headers = list(rows[0].keys())
        ws = wb.create_sheet(title=sheet_name)
        ws.append(headers)

        format_sheet(ws, rows, headers)

    conn.close()

    # -------------------------------
    # Save Excel file
    # -------------------------------
    cleanup_old_exports()

    # File name includes timestamp
    timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
    filename = f"{timestamp} FIO transactions.xlsx"
    output_file = os.path.join(OUTPUT_DIR, filename)

    wb.save(output_file)

    print(f"✅ Export complete:\n{output_file}")


# ======================================================
# MAIN ENTRY POINT
# ======================================================

if __name__ == "__main__":
    export_fio()
7
31 Python.py
Normal file
@@ -0,0 +1,7 @@
import os

DROPBOX = os.path.join(os.path.expanduser("~"), "Dropbox")
SHARED_PATH = os.path.join(DROPBOX, "PycharmShared")
GLOBAL_ENV = os.path.join(SHARED_PATH, ".env")

print(DROPBOX)
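
# GLOBAL_ENV is not used in this snippet; presumably other scripts load the
# shared .env from this path, e.g. with python-dotenv (illustrative, not part
# of this commit):
#   from dotenv import load_dotenv
#   load_dotenv(GLOBAL_ENV)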
BIN
test_text.xlsx
Normal file
Binary file not shown.