2025-11-25 20:43:37 +01:00
parent db2fe7e990
commit d75930f6e9
6 changed files with 747 additions and 14 deletions

.env (new file)

@@ -0,0 +1 @@
FIO_TOKEN="v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"

(modified file)

@@ -1,24 +1,24 @@
 import requests
 import json
+from datetime import date, timedelta
+from dotenv import load_dotenv
+import os
 
-# Replace with your actual token
-API_TOKEN = "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
+load_dotenv()
+API_TOKEN = os.getenv("FIO_TOKEN")
 
-# Example: download last 30 days of transactions in JSON
-url = f"https://fioapi.fio.cz/v1/rest/periods/v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0/2000-01-01/2025-07-23/transactions.json"
+# Date range: the last 10000 days
+start = (date.today() - timedelta(days=10000)).strftime("%Y-%m-%d")
+end = date.today().strftime("%Y-%m-%d")
+url = f"https://fioapi.fio.cz/v1/rest/periods/{API_TOKEN}/{start}/{end}/transactions.json"
 
 response = requests.get(url)
-print(response)
+print(response.status_code)
 data = response.json()
 with open(r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json", "w", encoding="utf-8") as f:
     json.dump(data, f, ensure_ascii=False, indent=4)
-#
-# # Print some info
-# for trans in data['accountStatement']['transactionList']['transaction']:
-#     print(f"Date: {trans['column0']['value']}")
-#     print(f"Amount: {trans['column1']['value']}")
-#     print(f"Currency: {trans['column14']['value']}")
-#     print(f"Sender/Receiver: {trans['column10']['value']}")
-#     print(f"Message: {trans['column16']['value']}")
-#     print("-" * 40)
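For a quick sanity check of the downloaded pohyby.json, the field-printing loop that this diff removes can live on as a standalone sketch. It only uses the column mapping already present in this commit (column0 date, column1 amount, column14 currency, column10 counterparty, column16 message), reads columns defensively the way ReadJSON.py below does, and points at the same path the downloader writes to.

#!/usr/bin/env python3
# Sketch: print a few fields from the downloaded pohyby.json for a quick check.
# Column numbers follow the mapping used elsewhere in this commit; any column
# may be missing or null, so access it defensively.
import json

JSON_PATH = r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json"

def col(t, n):
    """Return t['columnN']['value'], or None if the column is missing/null."""
    val = t.get(f"column{n}")
    return val.get("value") if val else None

with open(JSON_PATH, "r", encoding="utf-8") as f:
    data = json.load(f)

transactions = data["accountStatement"]["transactionList"].get("transaction", []) or []
if isinstance(transactions, dict):  # Fio may return a single dict instead of a list
    transactions = [transactions]

for t in transactions[:10]:  # the first few rows are enough for a check
    print(f"Date:            {col(t, 0)}")
    print(f"Amount:          {col(t, 1)} {col(t, 14)}")
    print(f"Sender/Receiver: {col(t, 10)}")
    print(f"Message:         {col(t, 16)}")
    print("-" * 40)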

ReadJSON.py (new file)

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import pymysql
import time
# =========================================
# CONFIG
# =========================================
JSON_PATH = r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json"
DB = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "fio",
"charset": "utf8mb4",
}
BATCH_SIZE = 500 # how many rows per executemany()
# =========================================
# HELPERS
# =========================================
def col(t, n):
"""
Safely read t['columnN']['value'], even if:
- columnN missing
- columnN is None
- value is None
"""
key = f"column{n}"
val = t.get(key)
if not val:
return None
return val.get("value")
def clean_date(dt):
"""
Convert Fio date '2025-10-26+0200' -> '2025-10-26'
    (Fio spec: date is always yyyy-mm-dd+GMT offset)
"""
if not dt:
return None
return dt[:10]
# =========================================
# LOAD JSON
# =========================================
start_time = time.time()
with open(JSON_PATH, "r", encoding="utf-8") as f:
data = json.load(f)
transactions = data["accountStatement"]["transactionList"].get("transaction", [])
# If only one transaction, Fio may return a dict instead of list
if isinstance(transactions, dict):
transactions = [transactions]
print(f"Loaded {len(transactions)} transactions from JSON")
account_number = data["accountStatement"]["info"]["accountId"]
# =========================================
# BUILD ROWS FOR BULK INSERT
# =========================================
rows = []
for t in transactions:
row = {
"datum": clean_date(col(t, 0)),
"objem": col(t, 1),
"mena": col(t, 14),
"cislo_uctu": account_number,
"protiucet": col(t, 2),
"kod_banky": col(t, 3),
"ks": col(t, 4),
"vs": col(t, 5),
"ss": col(t, 6),
"zprava": col(t, 16),
"poznamka": col(t, 25),
"id_operace": col(t, 22),
"id_pokynu": col(t, 24),
"ks1": col(t, 18),
"nazev_banky": col(t, 15),
"nazev_protiuctu": col(t, 10),
"ss1": col(t, 19),
"typ": col(t, 8),
"upr_objem": col(t, 20),
"upr_mena": col(t, 21),
"vs1": col(t, 17),
"zadal": col(t, 12),
}
rows.append(row)
print(f"Prepared {len(rows)} rows for DB insert/update")
# =========================================
# MYSQL INSERT (BATCHED)
# =========================================
conn = pymysql.connect(**DB)
cur = conn.cursor()
sql = """
INSERT INTO transactions
(
datum, objem, mena, cislo_uctu, protiucet, kod_banky,
ks, vs, ss, zprava_pro_prijemce, poznamka,
id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
)
VALUES
(
%(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
%(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
%(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
%(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
)
ON DUPLICATE KEY UPDATE
datum = VALUES(datum),
objem = VALUES(objem),
mena = VALUES(mena),
protiucet = VALUES(protiucet),
kod_banky = VALUES(kod_banky),
ks = VALUES(ks),
vs = VALUES(vs),
ss = VALUES(ss),
zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
poznamka = VALUES(poznamka),
ks_1 = VALUES(ks_1),
nazev_banky = VALUES(nazev_banky),
nazev_protiuctu = VALUES(nazev_protiuctu),
ss_1 = VALUES(ss_1),
typ = VALUES(typ),
upresneni_objem = VALUES(upresneni_objem),
upresneni_mena = VALUES(upresneni_mena),
vs_1 = VALUES(vs_1),
zadal = VALUES(zadal)
"""
total = len(rows)
inserted = 0
for i in range(0, total, BATCH_SIZE):
chunk = rows[i:i + BATCH_SIZE]
cur.executemany(sql, chunk)
conn.commit()
inserted += len(chunk)
# optional progress info:
# print(f"Committed {inserted}/{total} rows")
cur.close()
conn.close()
elapsed = time.time() - start_time
print(f"✓ Imported {inserted} transactions into MySQL in {elapsed:.2f} seconds.")
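ReadJSON.py hard-codes the MySQL password right next to the token handling that the downloader just moved into .env, and the multi-account script below even carries a comment suggesting the same move for the DB credentials. A minimal sketch of that, assuming hypothetical DB_HOST/DB_PORT/DB_USER/DB_PASSWORD/DB_NAME entries in the existing .env; none of these names are part of this commit.

# Sketch only: build the pymysql config from .env instead of hard-coding it.
# The DB_* variable names are assumptions, not part of this commit.
import os
from dotenv import load_dotenv

load_dotenv()

DB = {
    "host": os.getenv("DB_HOST", "192.168.1.76"),
    "port": int(os.getenv("DB_PORT", "3307")),
    "user": os.getenv("DB_USER", "root"),
    "password": os.environ["DB_PASSWORD"],  # fail fast if the secret is missing
    "database": os.getenv("DB_NAME", "fio"),
    "charset": "utf8mb4",
}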

(new file)

@@ -0,0 +1,269 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import json
import time
from datetime import date, timedelta
from pathlib import Path
import requests
import pymysql
# =========================================
# CONFIG
# =========================================
ACCOUNTS_FILE = r"u:\PycharmProjects\FIO\accounts.json"
JSON_BASE_DIR = r"u:\Dropbox\!!!Days\Downloads Z230\Fio"  # where the downloaded JSONs are stored
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",  # adjust as needed / move to .env
"database": "fio",
"charset": "utf8mb4",
}
BATCH_SIZE = 500
DAYS_BACK = 90
# =========================================
# HELPERS
# =========================================
def load_accounts(path: str):
with open(path, "r", encoding="utf-8") as f:
accounts = json.load(f)
    # simple validation
for acc in accounts:
for key in ("name", "account_number", "token"):
if key not in acc:
raise ValueError(f"Missing '{key}' in account config: {acc}")
return accounts
def fio_url_for_period(token: str, d_from: date, d_to: date) -> str:
from_str = d_from.strftime("%Y-%m-%d")
to_str = d_to.strftime("%Y-%m-%d")
return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{from_str}/{to_str}/transactions.json"
def fetch_fio_json(token: str, d_from: date, d_to: date):
url = fio_url_for_period(token, d_from, d_to)
resp = requests.get(url, timeout=30)
if resp.status_code != 200:
print(f" ❌ HTTP {resp.status_code} from Fio: {url}")
return None
try:
return resp.json()
except json.JSONDecodeError:
print(" ❌ Cannot decode JSON from Fio response")
return None
def safe_col(t: dict, n: int):
"""
Safely read t['columnN']['value'], i.e. Fio column.
Handles:
- missing columnN
- columnN is None
- missing 'value'
"""
key = f"column{n}"
val = t.get(key)
if not val:
return None
return val.get("value")
def clean_date(dt_str: str):
"""
Convert Fio date '2025-10-26+0200' -> '2025-10-26'
    Fio spec: date is always yyyy-mm-dd+GMT offset.
"""
if not dt_str:
return None
return dt_str[:10]
def ensure_dir(path: Path):
path.mkdir(parents=True, exist_ok=True)
def save_json_for_account(base_dir: str, account_cfg: dict, data: dict, d_from: date, d_to: date):
"""
    Save the JSON into a subfolder named after the account number; the file name carries the period.
"""
acc_num_raw = account_cfg["account_number"]
acc_folder_name = acc_num_raw.replace("/", "_") # 2101234567_2700
out_dir = Path(base_dir) / acc_folder_name
ensure_dir(out_dir)
filename = f"{d_from.strftime('%Y-%m-%d')}_to_{d_to.strftime('%Y-%m-%d')}.json"
out_path = out_dir / filename
with open(out_path, "w", encoding="utf-8") as f:
json.dump(data, f, ensure_ascii=False, indent=2)
return out_path
# =========================================
# MAIN IMPORT
# =========================================
def main():
start_all = time.time()
    # period: the last 90 days
today = date.today()
d_from = today - timedelta(days=DAYS_BACK)
d_to = today
    print("=== Fio multi-account import ===")
    print(f"Period: {d_from} to {d_to}")
    print("Loading accounts from the JSON config...")
    accounts = load_accounts(ACCOUNTS_FILE)
    print(f" Accounts in config: {len(accounts)}\n")
    # connect to the DB
conn = pymysql.connect(**DB)
cur = conn.cursor()
    # SQL with ON DUPLICATE KEY UPDATE
sql = """
INSERT INTO transactions
(
datum, objem, mena, cislo_uctu, protiucet, kod_banky,
ks, vs, ss, zprava_pro_prijemce, poznamka,
id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
)
VALUES
(
%(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
%(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
%(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
%(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
)
ON DUPLICATE KEY UPDATE
datum = VALUES(datum),
objem = VALUES(objem),
mena = VALUES(mena),
protiucet = VALUES(protiucet),
kod_banky = VALUES(kod_banky),
ks = VALUES(ks),
vs = VALUES(vs),
ss = VALUES(ss),
zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
poznamka = VALUES(poznamka),
ks_1 = VALUES(ks_1),
nazev_banky = VALUES(nazev_banky),
nazev_protiuctu = VALUES(nazev_protiuctu),
ss_1 = VALUES(ss_1),
typ = VALUES(typ),
upresneni_objem = VALUES(upresneni_objem),
upresneni_mena = VALUES(upresneni_mena),
vs_1 = VALUES(vs_1),
zadal = VALUES(zadal)
"""
total_inserted = 0
for acc in accounts:
name = acc["name"]
cfg_acc_num = acc["account_number"]
token = acc["token"]
        print(f"--- Account: {name} ({cfg_acc_num}) ---")
t0 = time.time()
data = fetch_fio_json(token, d_from, d_to)
if data is None:
            print(" Skipping, no data / API error.\n")
            continue
        # optionally keep a copy of the raw JSON
        json_path = save_json_for_account(JSON_BASE_DIR, acc, data, d_from, d_to)
        print(f" JSON saved to: {json_path}")
        # extract the transactions
tlist = data["accountStatement"]["transactionList"].get("transaction", [])
if isinstance(tlist, dict):
tlist = [tlist]
        print(f" Transactions in this period: {len(tlist)}")
        if not tlist:
            print(" No transactions, moving on.\n")
continue
fio_acc_id = data["accountStatement"]["info"]["accountId"]
if cfg_acc_num and cfg_acc_num.split("/")[0] not in fio_acc_id:
            # warning only, not a fatal error
            print(f" ⚠ Warning: accountId from Fio ({fio_acc_id}) "
                  f"does not match account_number in the config ({cfg_acc_num})")
        # prepare rows for the batch insert
rows = []
for t in tlist:
row = {
"datum": clean_date(safe_col(t, 0)),
"objem": safe_col(t, 1),
"mena": safe_col(t, 14),
"cislo_uctu": fio_acc_id,
"protiucet": safe_col(t, 2),
"kod_banky": safe_col(t, 3),
"ks": safe_col(t, 4),
"vs": safe_col(t, 5),
"ss": safe_col(t, 6),
"zprava": safe_col(t, 16),
"poznamka": safe_col(t, 25),
"id_operace": safe_col(t, 22),
"id_pokynu": safe_col(t, 24),
"ks1": safe_col(t, 18),
"nazev_banky": safe_col(t, 15),
"nazev_protiuctu": safe_col(t, 10),
"ss1": safe_col(t, 19),
"typ": safe_col(t, 8),
"upr_objem": safe_col(t, 20),
"upr_mena": safe_col(t, 21),
"vs1": safe_col(t, 17),
"zadal": safe_col(t, 12),
}
rows.append(row)
# batch insert
inserted = 0
for i in range(0, len(rows), BATCH_SIZE):
chunk = rows[i:i + BATCH_SIZE]
cur.executemany(sql, chunk)
conn.commit()
inserted += len(chunk)
elapsed = time.time() - t0
total_inserted += inserted
        print(f" ✓ Written (insert/update): {inserted} rows to the DB in {elapsed:.2f} s\n")
cur.close()
conn.close()
total_elapsed = time.time() - start_all
    print(f"=== Done. {total_inserted} transactions written in total. "
          f"Total time: {total_elapsed:.2f} s ===")
if __name__ == "__main__":
main()
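safe_col/col, clean_date and the column-to-row mapping are now copied into three scripts, so a change to the DB layout has to be made in three places. A sketch of what a shared helper module could look like; fio_common.py is a hypothetical name, not part of this commit.

# fio_common.py (hypothetical shared module, not part of this commit)
# Collects the helpers duplicated across the Fio import scripts.

def safe_col(t: dict, n: int):
    """Return t['columnN']['value'], or None if the column is missing/null."""
    val = t.get(f"column{n}")
    return val.get("value") if val else None

def clean_date(dt: str):
    """Trim Fio's '2025-10-26+0200' to '2025-10-26'."""
    return dt[:10] if dt else None

def transaction_row(t: dict, account_id: str) -> dict:
    """Map one Fio transaction dict to the row layout the INSERT statements expect."""
    return {
        "datum": clean_date(safe_col(t, 0)),
        "objem": safe_col(t, 1),
        "mena": safe_col(t, 14),
        "cislo_uctu": account_id,
        "protiucet": safe_col(t, 2),
        "kod_banky": safe_col(t, 3),
        "ks": safe_col(t, 4),
        "vs": safe_col(t, 5),
        "ss": safe_col(t, 6),
        "zprava": safe_col(t, 16),
        "poznamka": safe_col(t, 25),
        "id_operace": safe_col(t, 22),
        "id_pokynu": safe_col(t, 24),
        "ks1": safe_col(t, 18),
        "nazev_banky": safe_col(t, 15),
        "nazev_protiuctu": safe_col(t, 10),
        "ss1": safe_col(t, 19),
        "typ": safe_col(t, 8),
        "upr_objem": safe_col(t, 20),
        "upr_mena": safe_col(t, 21),
        "vs1": safe_col(t, 17),
        "zadal": safe_col(t, 12),
    }

Each importer would then build its batch as rows = [transaction_row(t, fio_acc_id) for t in tlist].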

(new file)

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import json
import time
from datetime import date, timedelta
from pathlib import Path
import requests
import pymysql
# =========================================
# CONFIG
# =========================================
ACCOUNTS_FILE = r"u:\PycharmProjects\FIO\accounts.json"
JSON_BASE_DIR = r"u:\Dropbox\!!!Days\Downloads Z230\Fio"
DB = {
"host": "192.168.1.76",
"port": 3307,
"user": "root",
"password": "Vlado9674+",
"database": "fio",
"charset": "utf8mb4",
}
BATCH_SIZE = 500
# =========================================
# HELPERS
# =========================================
def load_accounts(path: str):
with open(path, "r", encoding="utf-8") as f:
accounts = json.load(f)
for acc in accounts:
for key in ("name", "account_number", "token"):
if key not in acc:
raise ValueError(f"Missing key '{key}' in {acc}")
return accounts
def fio_period_url(token: str, d_from: date, d_to: date):
return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{d_from:%Y-%m-%d}/{d_to:%Y-%m-%d}/transactions.json"
def fetch_fio_json(token: str, d_from: date, d_to: date):
url = fio_period_url(token, d_from, d_to)
resp = requests.get(url, timeout=30)
if resp.status_code != 200:
print(f"❌ HTTP {resp.status_code} from Fio: {url}")
return None
try:
return resp.json()
except json.JSONDecodeError:
print("❌ JSON decode error")
return None
def safe_col(t, n):
key = f"column{n}"
v = t.get(key)
return None if not v else v.get("value")
def clean_date(dt):
if not dt:
return None
return dt[:10] # "YYYY-MM-DD"
def ensure_dir(p: Path):
p.mkdir(parents=True, exist_ok=True)
def save_json(base_dir, account_cfg, data, d_from, d_to):
folder = account_cfg["account_number"].replace("/", "_")
out = Path(base_dir) / folder
ensure_dir(out)
filename = f"FULL_{d_from:%Y-%m-%d}_to_{d_to:%Y-%m-%d}.json"
path = out / filename
with open(path, "w", encoding="utf-8") as f:
json.dump(data, f, ensure_ascii=False, indent=2)
return path
# =========================================
# MAIN
# =========================================
def main():
print("\n=== FULL HISTORY IMPORT (Fio extra-permissions token) ===\n")
accounts = load_accounts(ACCOUNTS_FILE)
# show accounts to user
for i, acc in enumerate(accounts, start=1):
print(f"{i}. {acc['name']} ({acc['account_number']})")
print()
while True:
try:
selection = int(input("Select account number (1..N): "))
if 1 <= selection <= len(accounts):
break
        except ValueError:
pass
print("Invalid selection, try again.\n")
acc = accounts[selection - 1]
name = acc["name"]
token = acc["token"]
acc_num = acc["account_number"]
print(f"\nSelected: {name} ({acc_num})")
print("⚠ Make sure you generated the special 10-minute FULL-HISTORY TOKEN.")
input("Press ENTER to continue...")
# full 20-year history
today = date.today()
d_from = today.replace(year=today.year - 20)
d_to = today
print(f"\nDownloading ALL transactions from {d_from} to {d_to}")
start_time = time.time()
data = fetch_fio_json(token, d_from, d_to)
if data is None:
print("❌ Download failed")
return
# save JSON
json_path = save_json(JSON_BASE_DIR, acc, data, d_from, d_to)
print(f"JSON saved to {json_path}")
# extract transactions
tlist = data["accountStatement"]["transactionList"].get("transaction", [])
if isinstance(tlist, dict):
tlist = [tlist]
print(f"Transactions loaded: {len(tlist)}")
if not tlist:
print("No transactions found, exiting.")
return
# MySQL
conn = pymysql.connect(**DB)
cur = conn.cursor()
sql = """
INSERT INTO transactions
(
datum, objem, mena, cislo_uctu, protiucet, kod_banky,
ks, vs, ss, zprava_pro_prijemce, poznamka,
id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
)
VALUES
(
%(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
%(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
%(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
%(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
)
ON DUPLICATE KEY UPDATE
datum=VALUES(datum),
objem=VALUES(objem),
mena=VALUES(mena),
protiucet=VALUES(protiucet),
kod_banky=VALUES(kod_banky),
ks=VALUES(ks),
vs=VALUES(vs),
ss=VALUES(ss),
zprava_pro_prijemce=VALUES(zprava_pro_prijemce),
poznamka=VALUES(poznamka),
ks_1=VALUES(ks_1),
nazev_banky=VALUES(nazev_banky),
nazev_protiuctu=VALUES(nazev_protiuctu),
ss_1=VALUES(ss_1),
typ=VALUES(typ),
upresneni_objem=VALUES(upresneni_objem),
upresneni_mena=VALUES(upresneni_mena),
vs_1=VALUES(vs_1),
zadal=VALUES(zadal)
"""
fio_acc_id = data["accountStatement"]["info"]["accountId"]
# build batch
rows = []
for t in tlist:
rows.append({
"datum": clean_date(safe_col(t, 0)),
"objem": safe_col(t, 1),
"mena": safe_col(t, 14),
"cislo_uctu": fio_acc_id,
"protiucet": safe_col(t, 2),
"kod_banky": safe_col(t, 3),
"ks": safe_col(t, 4),
"vs": safe_col(t, 5),
"ss": safe_col(t, 6),
"zprava": safe_col(t, 16),
"poznamka": safe_col(t, 25),
"id_operace": safe_col(t, 22),
"id_pokynu": safe_col(t, 24),
"ks1": safe_col(t, 18),
"nazev_banky": safe_col(t, 15),
"nazev_protiuctu": safe_col(t, 10),
"ss1": safe_col(t, 19),
"typ": safe_col(t, 8),
"upr_objem": safe_col(t, 20),
"upr_mena": safe_col(t, 21),
"vs1": safe_col(t, 17),
"zadal": safe_col(t, 12),
})
# batch insert
inserted = 0
for i in range(0, len(rows), BATCH_SIZE):
chunk = rows[i:i + BATCH_SIZE]
cur.executemany(sql, chunk)
conn.commit()
inserted += len(chunk)
cur.close()
conn.close()
print(f"\n✓ Inserted/updated {inserted} transactions.")
print(f"Total time: {time.time() - start_time:.2f} s")
if __name__ == "__main__":
main()
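A full-history download is the most likely place to trip over Fio's per-token rate limit (the public API docs describe a mandatory pause between calls, commonly cited as 30 seconds, answered with HTTP 409). A hedged sketch of a retry wrapper that could stand in for fetch_fio_json in that case; the 409/30 s behaviour is an assumption here, not something this commit relies on.

# Sketch only: retry a Fio period download when the API answers HTTP 409
# (same token reused too soon). Treat the 30 s figure as an assumption.
import time
from datetime import date

import requests

def fetch_with_retry(token: str, d_from: date, d_to: date, attempts: int = 3, wait_s: int = 30):
    url = (f"https://fioapi.fio.cz/v1/rest/periods/"
           f"{token}/{d_from:%Y-%m-%d}/{d_to:%Y-%m-%d}/transactions.json")
    for attempt in range(1, attempts + 1):
        resp = requests.get(url, timeout=30)
        if resp.status_code == 200:
            return resp.json()
        if resp.status_code == 409 and attempt < attempts:
            print(f"HTTP 409 (token reused too soon), waiting {wait_s} s "
                  f"before attempt {attempt + 1}/{attempts}")
            time.sleep(wait_s)
            continue
        print(f"❌ HTTP {resp.status_code} from Fio: {url}")
        return None
    return None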

accounts.json (new file)

@@ -0,0 +1,53 @@
[
{
"name": "EUR tatínek 1",
"account_number": "2100074583",
"token": "GuV2Boaulx56ZiQUqUArgg6P9qdfEVKOoH6wF3PfAZ0fPS01r2WbiNiCsCcIBZ0U"
},
{
"name": "CZK rodina",
"account_number": "2100046291",
"token": "v0GJaAVeefzV1lnx1jPCf2nFF7SuOPzzrL5tobPNsC7oCChXG4hahDYVb8Rdcex0"
},
{
"name": "EUR TrialHelp",
"account_number": "2200787265",
"token": "9yG5g6lHWGS6YU2R2petm5DRYTb9orhJ8VPJ0p7RtTjlIo2vB83ynBlPCMGRIwzy"
},
{
"name": "CZK tatínek",
"account_number": "2400046293",
"token": "j2qmpvWe4RfKtBTBlhwC1VFED7HJlVAe23iPBH1TWis9htEyYe8fRejcMeSxOLqC"
},
{
"name": "CHF tatínek",
"account_number": "2402161017",
"token": "aNfK9iu6qIPlugGCR6gvSJ7NXtTkDfVVj8fBz4X1pORuGKf6VXjWin4wrr9WRjSd"
},
{
"name": "EUR tatínek 2",
"account_number": "2500074582",
"token": "aLsl9ETRUU1IgoYeinAzYWyruIoJvs6UvJKTGRlJcm7HaEc5ojsFdxJizyT9lREO"
},
{
"name": "CZK TrialHelp",
"account_number": "2900046548",
"token": "pKZVHbFDVsbTa8ryEaVc6A2nyrlb4TbT1tCiimieesHvhKFoJmYBRVjCpnvjiUUK"
},
{
"name": "CZK maminka svojě věci",
"account_number": "2003310572",
"token": "TkrRvnMK77OSSYdVulNvZcT6ltWcmjqkp3RN5WYwnBpNTuaKCWO1zHKOlDGAiNyv"
},
{
"name": "CZK na jídlo",
"account_number": "2403310563",
"token": "axRvFxu4VCzsDp5QZXN8LQ0fQUqzV2FEBZrM595x3Rtp10zowRBcGOFs9uNNPb7Q"
},
{
"name": "CZK ordinace",
"account_number": "2800046620",
"token": "Xzdr3eK7se7ZgeE3JujgeidGb0WrB7mGQ6HSOiBJzWi0kPURYKRpkRKB3ZOpt3rq"
}
]
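accounts.json stores every API token inline next to the account it belongs to, while the downloader above already reads its token from .env. A sketch of combining the two, assuming a hypothetical token_env field that names an environment variable (e.g. FIO_TOKEN_CZK_RODINA) instead of embedding the secret; the field name and variable names are illustrative only.

# Sketch only: resolve tokens from the environment instead of storing them
# in accounts.json. The "token_env" field and FIO_TOKEN_* names are assumptions.
import json
import os

from dotenv import load_dotenv

load_dotenv()

def load_accounts(path: str):
    with open(path, "r", encoding="utf-8") as f:
        accounts = json.load(f)
    for acc in accounts:
        for key in ("name", "account_number", "token_env"):
            if key not in acc:
                raise ValueError(f"Missing '{key}' in account config: {acc}")
        acc["token"] = os.environ[acc["token_env"]]  # e.g. FIO_TOKEN_CZK_RODINA
    return accounts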