notebook
239 22 ReadJSONfullonetimeselected.py Normal file
@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
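#
# One-time full-history import for a single Fio account:
#   1. pick an account from accounts.json,
#   2. download its complete transaction history from the Fio REST API,
#   3. archive the raw JSON response to disk,
#   4. upsert the transactions into the MySQL table `transactions`.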

import os
import json
import time
from datetime import date, timedelta
from pathlib import Path
import requests
import pymysql

# =========================================
# CONFIG
# =========================================
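# Paths, MySQL connection settings and the executemany() batch size.
# accounts.json is expected to contain a list of objects, each with
# "name", "account_number" and "token" keys (validated in load_accounts).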
ACCOUNTS_FILE = r"u:\PycharmProjects\FIO\accounts.json"
JSON_BASE_DIR = r"u:\Dropbox\!!!Days\Downloads Z230\Fio"

DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}

BATCH_SIZE = 500


# =========================================
# HELPERS
# =========================================
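# Load the account definitions and fail fast if a required key is missing.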
def load_accounts(path: str):
    with open(path, "r", encoding="utf-8") as f:
        accounts = json.load(f)

    for acc in accounts:
        for key in ("name", "account_number", "token"):
            if key not in acc:
                raise ValueError(f"Missing key '{key}' in {acc}")

    return accounts


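# Build the Fio REST "periods" endpoint URL for a date range.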
def fio_period_url(token: str, d_from: date, d_to: date):
    return f"https://fioapi.fio.cz/v1/rest/periods/{token}/{d_from:%Y-%m-%d}/{d_to:%Y-%m-%d}/transactions.json"


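# Download the statement for the given period; returns the parsed JSON,
# or None on a non-200 response or an unparsable body.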
def fetch_fio_json(token: str, d_from: date, d_to: date):
    url = fio_period_url(token, d_from, d_to)
    resp = requests.get(url, timeout=30)
    if resp.status_code != 200:
        print(f"❌ HTTP {resp.status_code} from Fio: {url}")
        return None
    try:
        return resp.json()
    except json.JSONDecodeError:
        print("❌ JSON decode error")
        return None


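# Fio exposes each transaction field as "columnN": {"value": ...} or null;
# return the value, or None when the column is absent or null.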
def safe_col(t, n):
    key = f"column{n}"
    v = t.get(key)
    return None if not v else v.get("value")


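# Fio timestamps usually carry a timezone suffix (e.g. "2024-01-31+0100");
# only the date part is stored.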
def clean_date(dt):
    if not dt:
        return None
    return dt[:10]  # "YYYY-MM-DD"


def ensure_dir(p: Path):
    p.mkdir(parents=True, exist_ok=True)


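# Archive the raw API response under JSON_BASE_DIR/<account_number>/,
# with "/" in the account number replaced by "_".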
def save_json(base_dir, account_cfg, data, d_from, d_to):
    folder = account_cfg["account_number"].replace("/", "_")
    out = Path(base_dir) / folder
    ensure_dir(out)

    filename = f"FULL_{d_from:%Y-%m-%d}_to_{d_to:%Y-%m-%d}.json"
    path = out / filename

    with open(path, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    return path


# =========================================
# MAIN
# =========================================
def main():
    print("\n=== FULL HISTORY IMPORT (Fio extra-permissions token) ===\n")

    accounts = load_accounts(ACCOUNTS_FILE)

    # show accounts to user
    for i, acc in enumerate(accounts, start=1):
        print(f"{i}. {acc['name']} ({acc['account_number']})")

    print()
    while True:
        try:
            selection = int(input("Select account number (1..N): "))
            if 1 <= selection <= len(accounts):
                break
        except ValueError:  # non-numeric input
            pass
        print("Invalid selection, try again.\n")

    acc = accounts[selection - 1]
    name = acc["name"]
    token = acc["token"]
    acc_num = acc["account_number"]

    print(f"\nSelected: {name} ({acc_num})")
    print("⚠ Make sure you generated the special 10-minute FULL-HISTORY TOKEN.")
    input("Press ENTER to continue...")

    # full 20-year history
    today = date.today()
    d_from = today.replace(year=today.year - 20)
    d_to = today

    print(f"\nDownloading ALL transactions from {d_from} to {d_to}…")

    start_time = time.time()

    data = fetch_fio_json(token, d_from, d_to)
    if data is None:
        print("❌ Download failed")
        return

    # save JSON
    json_path = save_json(JSON_BASE_DIR, acc, data, d_from, d_to)
    print(f"JSON saved to {json_path}")

    # extract transactions (a single transaction may come back as a bare
    # object instead of a list, hence the normalisation below)
    tlist = data["accountStatement"]["transactionList"].get("transaction", [])
    if isinstance(tlist, dict):
        tlist = [tlist]

    print(f"Transactions loaded: {len(tlist)}")

    if not tlist:
        print("No transactions found, exiting.")
        return

    # MySQL
    conn = pymysql.connect(**DB)
    cur = conn.cursor()

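    # Upsert: ON DUPLICATE KEY UPDATE assumes the `transactions` table has a
    # suitable UNIQUE/PRIMARY key (presumably on the Fio operation id), so
    # re-running the import does not create duplicate rows.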
    sql = """
        INSERT INTO transactions
        (
            datum, objem, mena, cislo_uctu, protiucet, kod_banky,
            ks, vs, ss, zprava_pro_prijemce, poznamka,
            id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
            ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
        )
        VALUES
        (
            %(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
            %(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
            %(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
            %(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
        )
        ON DUPLICATE KEY UPDATE
            datum=VALUES(datum),
            objem=VALUES(objem),
            mena=VALUES(mena),
            protiucet=VALUES(protiucet),
            kod_banky=VALUES(kod_banky),
            ks=VALUES(ks),
            vs=VALUES(vs),
            ss=VALUES(ss),
            zprava_pro_prijemce=VALUES(zprava_pro_prijemce),
            poznamka=VALUES(poznamka),
            ks_1=VALUES(ks_1),
            nazev_banky=VALUES(nazev_banky),
            nazev_protiuctu=VALUES(nazev_protiuctu),
            ss_1=VALUES(ss_1),
            typ=VALUES(typ),
            upresneni_objem=VALUES(upresneni_objem),
            upresneni_mena=VALUES(upresneni_mena),
            vs_1=VALUES(vs_1),
            zadal=VALUES(zadal)
    """

    fio_acc_id = data["accountStatement"]["info"]["accountId"]

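    # Map Fio "columnN" fields onto the Czech column names used in the
    # `transactions` table; the numeric indices follow Fio's columnN
    # numbering as consumed by safe_col().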
    # build batch
    rows = []
    for t in tlist:
        rows.append({
            "datum": clean_date(safe_col(t, 0)),
            "objem": safe_col(t, 1),
            "mena": safe_col(t, 14),
            "cislo_uctu": fio_acc_id,
            "protiucet": safe_col(t, 2),
            "kod_banky": safe_col(t, 3),
            "ks": safe_col(t, 4),
            "vs": safe_col(t, 5),
            "ss": safe_col(t, 6),
            "zprava": safe_col(t, 16),
            "poznamka": safe_col(t, 25),
            "id_operace": safe_col(t, 22),
            "id_pokynu": safe_col(t, 24),
            "ks1": safe_col(t, 18),
            "nazev_banky": safe_col(t, 15),
            "nazev_protiuctu": safe_col(t, 10),
            "ss1": safe_col(t, 19),
            "typ": safe_col(t, 8),
            "upr_objem": safe_col(t, 20),
            "upr_mena": safe_col(t, 21),
            "vs1": safe_col(t, 17),
            "zadal": safe_col(t, 12),
        })

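    # executemany() in chunks of BATCH_SIZE rows, committing after each chunk
    # to keep individual MySQL transactions small.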
    # batch insert
    inserted = 0
    for i in range(0, len(rows), BATCH_SIZE):
        chunk = rows[i:i + BATCH_SIZE]
        cur.executemany(sql, chunk)
        conn.commit()
        inserted += len(chunk)

    cur.close()
    conn.close()

    print(f"\n✓ Inserted/updated {inserted} transactions.")
    print(f"Total time: {time.time() - start_time:.2f} s")


if __name__ == "__main__":
    main()