This commit is contained in:
2025-11-25 20:43:37 +01:00
parent db2fe7e990
commit d75930f6e9
6 changed files with 747 additions and 14 deletions

171
20 ReadJSON.py Normal file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import pymysql
import time
# =========================================
# CONFIG
# =========================================
# Path to the Fio bank-statement JSON export that this script imports.
JSON_PATH = r"u:\Dropbox\!!!Days\Downloads Z230\Fio\pohyby.json"
# MySQL connection parameters, passed straight to pymysql.connect(**DB).
# NOTE(review): credentials are hardcoded here — consider moving them to an
# environment variable or a config file outside version control.
DB = {
    "host": "192.168.1.76",
    "port": 3307,
    "user": "root",
    "password": "Vlado9674+",
    "database": "fio",
    "charset": "utf8mb4",
}
BATCH_SIZE = 500  # how many rows per executemany()
# =========================================
# HELPERS
# =========================================
def col(t, n):
    """Return t['columnN']['value'], or None when the column is missing/empty.

    Handles all three absent cases: key not present, entry is None,
    and 'value' itself being None.
    """
    entry = t.get(f"column{n}")
    return entry.get("value") if entry else None
def clean_date(dt):
    """Strip the GMT offset from a Fio date: '2025-10-26+0200' -> '2025-10-26'.

    Fio dates are always 'rrrr-mm-dd+GMT', so the first 10 characters are
    the plain ISO date. Falsy input (None, '') yields None.
    """
    return dt[:10] if dt else None
# =========================================
# LOAD JSON
# =========================================
# Wall-clock timer for the final summary line.
start_time = time.time()
# Read the whole Fio statement export into memory.
with open(JSON_PATH, "r", encoding="utf-8") as f:
    data = json.load(f)
# 'transaction' may be absent on an empty statement -> default to [].
transactions = data["accountStatement"]["transactionList"].get("transaction", [])
# If only one transaction, Fio may return a dict instead of list
if isinstance(transactions, dict):
    transactions = [transactions]
print(f"Loaded {len(transactions)} transactions from JSON")
# Account number of the statement owner; copied into every inserted row.
account_number = data["accountStatement"]["info"]["accountId"]
# =========================================
# BUILD ROWS FOR BULK INSERT
# =========================================
# SQL parameter name -> Fio JSON column index (per the Fio API layout).
_COLUMN_BY_FIELD = {
    "objem": 1,
    "mena": 14,
    "protiucet": 2,
    "kod_banky": 3,
    "ks": 4,
    "vs": 5,
    "ss": 6,
    "zprava": 16,
    "poznamka": 25,
    "id_operace": 22,
    "id_pokynu": 24,
    "ks1": 18,
    "nazev_banky": 15,
    "nazev_protiuctu": 10,
    "ss1": 19,
    "typ": 8,
    "upr_objem": 20,
    "upr_mena": 21,
    "vs1": 17,
    "zadal": 12,
}
rows = []
for txn in transactions:
    record = {field: col(txn, idx) for field, idx in _COLUMN_BY_FIELD.items()}
    record["datum"] = clean_date(col(txn, 0))  # column 0 needs the '+GMT' suffix stripped
    record["cislo_uctu"] = account_number      # same owner account for every row
    rows.append(record)
print(f"Prepared {len(rows)} rows for DB insert/update")
# =========================================
# MYSQL INSERT (BATCHED)
# =========================================
conn = pymysql.connect(**DB)
cur = conn.cursor()
sql = """
INSERT INTO transactions
(
datum, objem, mena, cislo_uctu, protiucet, kod_banky,
ks, vs, ss, zprava_pro_prijemce, poznamka,
id_operace, id_pokynu, ks_1, nazev_banky, nazev_protiuctu,
ss_1, typ, upresneni_objem, upresneni_mena, vs_1, zadal
)
VALUES
(
%(datum)s, %(objem)s, %(mena)s, %(cislo_uctu)s, %(protiucet)s, %(kod_banky)s,
%(ks)s, %(vs)s, %(ss)s, %(zprava)s, %(poznamka)s,
%(id_operace)s, %(id_pokynu)s, %(ks1)s, %(nazev_banky)s, %(nazev_protiuctu)s,
%(ss1)s, %(typ)s, %(upr_objem)s, %(upr_mena)s, %(vs1)s, %(zadal)s
)
ON DUPLICATE KEY UPDATE
datum = VALUES(datum),
objem = VALUES(objem),
mena = VALUES(mena),
protiucet = VALUES(protiucet),
kod_banky = VALUES(kod_banky),
ks = VALUES(ks),
vs = VALUES(vs),
ss = VALUES(ss),
zprava_pro_prijemce = VALUES(zprava_pro_prijemce),
poznamka = VALUES(poznamka),
ks_1 = VALUES(ks_1),
nazev_banky = VALUES(nazev_banky),
nazev_protiuctu = VALUES(nazev_protiuctu),
ss_1 = VALUES(ss_1),
typ = VALUES(typ),
upresneni_objem = VALUES(upresneni_objem),
upresneni_mena = VALUES(upresneni_mena),
vs_1 = VALUES(vs_1),
zadal = VALUES(zadal)
"""
total = len(rows)
inserted = 0
try:
    # Insert in BATCH_SIZE chunks to keep each statement within packet
    # limits; commit per batch so completed batches survive a mid-run crash.
    for i in range(0, total, BATCH_SIZE):
        chunk = rows[i:i + BATCH_SIZE]
        cur.executemany(sql, chunk)
        conn.commit()
        inserted += len(chunk)
        # optional progress info:
        # print(f"Committed {inserted}/{total} rows")
finally:
    # Fix: previously the cursor/connection leaked if a batch raised;
    # always release them regardless of success or failure.
    cur.close()
    conn.close()
elapsed = time.time() - start_time
print(f"✓ Imported {inserted} transactions into MySQL in {elapsed:.2f} seconds.")