# gchr.py — GCHR SKR51/HBV bookkeeping export
  1. import csv
  2. import logging
  3. import os
  4. import xml.etree.ElementTree as ET
  5. from dataclasses import dataclass
  6. from datetime import datetime
  7. from pathlib import Path
  8. from xml.dom import minidom
  9. import numpy as np
  10. import pandas as pd
  11. ACCOUNT_INFO = [
  12. "Account",
  13. "Make",
  14. "Site",
  15. "Origin",
  16. "SalesChannel",
  17. "CostCarrier",
  18. "CostAccountingString",
  19. ]
  20. TRANSLATE = [
  21. "Konto_Nr_Händler",
  22. "Konto_Nr_SKR51",
  23. "Marke",
  24. "Standort",
  25. "Konto_Nr",
  26. "Kostenstelle",
  27. "Absatzkanal",
  28. "Kostenträger",
  29. "Kontoart",
  30. "Konto_1",
  31. "KRM",
  32. "IsNumeric",
  33. ]
  34. @dataclass
  35. class GchrConfig:
  36. first_month_of_financial_year: str
  37. class GCHR:
  38. booking_date: datetime
  39. df_bookings: pd.DataFrame = None
  40. df_translate: pd.DataFrame = None
  41. df_translate2: pd.DataFrame = None
  42. makes: dict[str, str] = None
  43. sites: dict[str, str] = None
  44. def __init__(self, base_dir) -> None:
  45. self.base_dir = base_dir
  46. self.account_translation = f"{self.base_dir}/data/Kontenrahmen_uebersetzt.csv"
  47. self.account_bookings = list(Path(self.base_dir).joinpath("data").glob("GuV_Bilanz_Salden*.csv"))
  48. self.first_month_of_financial_year = "10"
  49. pd.set_option("display.max_rows", 500)
  50. pd.set_option("display.float_format", lambda x: "%.2f" % x)
  51. def set_bookkeep_period(self, year, month):
  52. self.current_year = year
  53. self.current_month = month
  54. period = f"{year}-{month}"
  55. prot_file = f"{self.export_info_dir}/protokoll_{period}.log"
  56. logging.basicConfig(
  57. filename=prot_file,
  58. filemode="w",
  59. encoding="utf-8",
  60. level=logging.DEBUG,
  61. force=True,
  62. )
  63. self.debug_file = f"{self.export_info_dir}/debug_{period}.csv"
  64. self.account_ignored = f"{self.export_info_dir}/ignoriert_{period}.csv"
  65. # self.account_invalid = f"{self.export_info_dir}/ungueltig_{period}.csv"
  66. self.last_year = str(int(self.current_year) - 1)
  67. self.last_year2 = str(int(self.current_year) - 2)
  68. self.next_year = str(int(self.current_year) + 1)
  69. def header(self, makes_used, sites_used, main_site):
  70. return {
  71. "Country": "DE",
  72. "MainBmCode": main_site,
  73. "Month": self.current_month,
  74. "Year": self.current_year,
  75. "Currency": "EUR",
  76. "NumberOfMakes": len(makes_used),
  77. "NumberOfSites": len(sites_used),
  78. "ExtractionDate": self.booking_date.strftime("%d.%m.%Y"),
  79. "ExtractionTime": self.booking_date.strftime("%H:%M:%S"),
  80. "BeginFiscalYear": self.first_month_of_financial_year,
  81. }
  82. @property
  83. def bookkeep_filter(self):
  84. period = [self.current_year + str(i).zfill(2) for i in range(1, 13)]
  85. if self.first_month_of_financial_year != "01":
  86. if self.first_month_of_financial_year > self.current_month:
  87. period = [self.last_year + str(i).zfill(2) for i in range(1, 13)] + period
  88. else:
  89. period = period + [self.next_year + str(i).zfill(2) for i in range(1, 13)]
  90. fm = int(self.first_month_of_financial_year)
  91. period = period[fm - 1 : fm + 12]
  92. period = [self.current_year + "00"] + period
  93. rename_to = ["OpeningBalance"] + ["Period" + str(i).zfill(2) for i in range(1, 13)]
  94. return dict(zip(period, rename_to))
  95. def extract_acct_info(self, df: pd.DataFrame):
  96. acct_info = [
  97. "Marke",
  98. "Standort",
  99. "Konto_Nr",
  100. "Kostenstelle",
  101. "Absatzkanal",
  102. "Kostenträger",
  103. ]
  104. df["HasFiveDashes"] = df["Konto_Nr_SKR51"].str.count("-") == 5
  105. df["Invalid"] = "XX-XX-XXXX-XX-XX-XX"
  106. df["Konto_Nr_SKR51"] = np.where(
  107. df["HasFiveDashes"],
  108. df["Konto_Nr_SKR51"],
  109. df["Invalid"],
  110. )
  111. df[acct_info] = df["Konto_Nr_SKR51"].str.split(pat="-", n=6, expand=True)
  112. return df
  113. def export_all_periods(self, overwrite=False, today=None):
  114. dt = datetime.now()
  115. if today is not None:
  116. dt = datetime.fromisoformat(today)
  117. prev = str(dt.year - 1)
  118. periods = [(prev, str(x).zfill(2)) for x in range(dt.month, 13)] + [
  119. (str(dt.year), str(x).zfill(2)) for x in range(1, dt.month)
  120. ]
  121. for year, month in periods:
  122. filename = self.export_filename_for_period(year, month)
  123. if overwrite or not Path(filename).exists():
  124. os.makedirs(Path(filename).parent.joinpath("info"), exist_ok=True)
  125. self.export_period(year, month)
  126. def export_period(self, year, month):
  127. self.set_bookkeep_period(year, month)
  128. # Übersetzungstabelle laden
  129. self.get_translation()
  130. # Kontensalden laden
  131. df_bookings = self.filter_bookings()
  132. all_periods = set(df_bookings["Bookkeep Period"].to_list())
  133. bookkeep_period_date = datetime(int(year), int(month), 28)
  134. if df_bookings.shape[0] == 0 or len(all_periods) <= 1 or self.booking_date < bookkeep_period_date:
  135. logging.error("ABBRUCH!!! Keine Daten vorhanden!")
  136. return False
  137. filter_to = self.current_year + self.current_month
  138. period_no = list(self.bookkeep_filter.keys()).index(filter_to) + 1
  139. logging.info("df_bookings: " + str(df_bookings.shape))
  140. # Join auf Übersetzung
  141. df_combined = df_bookings.merge(self.df_translate, how="inner", on="Konto_Nr_Händler")
  142. logging.info(f"df_combined: {df_combined.shape}")
  143. df_pivot = df_combined.pivot_table(
  144. index=["Konto_Nr_SKR51"],
  145. columns=["period"],
  146. values="amount",
  147. aggfunc="sum",
  148. margins=True,
  149. margins_name="CumulatedYear",
  150. )
  151. df_pivot.drop(index="CumulatedYear", inplace=True)
  152. logging.info("df_pivot: " + str(df_pivot.shape))
  153. df = df_pivot.merge(self.df_translate2, how="inner", on="Konto_Nr_SKR51")
  154. makes_used = sorted(list(set(df["Marke"].to_list())))
  155. sites_used = sorted(list(set((df["Marke"] + "-" + df["Standort"]).to_list())))
  156. from_label = ["Marke", "Standort", "Konto_Nr", "Kostenstelle", "Absatzkanal", "Kostenträger", "KRM"]
  157. to_label = ["Make", "Site", "Account", "Origin", "SalesChannel", "CostCarrier", "CostAccountingString"]
  158. col_dict = dict(zip(from_label, to_label))
  159. df = df.rename(columns=col_dict)
  160. export_csv = self.export_filename[:-4] + ".csv"
  161. df.to_csv(export_csv, decimal=",", sep=";", encoding="latin-1", index=False)
  162. df = df[df["IsNumeric"] != False].groupby(ACCOUNT_INFO, as_index=False).aggregate("sum")
  163. # Infos ergänzen
  164. df["Decimals"] = 2
  165. # df.sort_values(by=["Konto_Nr_SKR51"], inplace=True)
  166. logging.info(df.shape)
  167. main_sites = [self.sites[s] for s in sites_used if s in self.sites and self.sites[s] != "0000"]
  168. for i, main_site in enumerate(main_sites):
  169. filename = self.export_filename
  170. if i > 0:
  171. filename = f"{filename[:-4]}_{main_site}.xml"
  172. self.export_skr51_xml(
  173. df.to_dict(orient="records"),
  174. self.bookkeep_filter,
  175. period_no,
  176. makes_used,
  177. sites_used,
  178. main_site,
  179. filename,
  180. )
  181. # Join auf Übersetzung - nicht zugeordnet
  182. df_ignored = df_bookings.merge(self.df_translate, how="left", on="Konto_Nr_Händler")
  183. df_ignored = df_ignored[df_ignored["Konto_Nr_SKR51"].isna()]
  184. if not df_ignored.empty:
  185. df_ignored = df_ignored.pivot_table(
  186. index=["Konto_Nr_Händler"],
  187. columns=["period"],
  188. values="amount",
  189. aggfunc="sum",
  190. margins=True,
  191. margins_name="CumulatedYear",
  192. )
  193. df_ignored.to_csv(self.account_ignored, decimal=",", sep=";", encoding="latin-1")
  194. return self.export_filename
  195. def get_translation(self):
  196. if self.df_translate is None:
  197. df_translate_import = pd.read_csv(
  198. self.account_translation,
  199. decimal=",",
  200. sep=";",
  201. encoding="latin-1",
  202. converters={i: str for i in range(0, 200)},
  203. ).reset_index()
  204. df_makes = df_translate_import[["Marke", "Marke_HBV"]].copy().drop_duplicates()
  205. df_makes = df_makes[df_makes["Marke_HBV"] != "0000"]
  206. self.makes = dict([(e["Marke"], e["Marke_HBV"]) for e in df_makes.to_dict(orient="records")])
  207. self.makes["99"] = "0000"
  208. df_sites = df_translate_import[["Marke", "Standort", "Standort_HBV"]].copy().drop_duplicates()
  209. df_sites["Standort_HBV"] = np.where(
  210. df_sites["Standort_HBV"].str.len() != 6, "0000", df_sites["Standort_HBV"]
  211. )
  212. self.sites = dict(
  213. [(e["Marke"] + "-" + e["Standort"], e["Standort_HBV"]) for e in df_sites.to_dict(orient="records")]
  214. )
  215. df_prepared = self.prepare_translation(df_translate_import)
  216. self.df_translate = self.special_translation(df_prepared)
  217. self.df_translate2 = (
  218. self.df_translate.drop(columns=["Konto_Nr_Händler"])
  219. .copy()
  220. .drop_duplicates()
  221. .set_index("Konto_Nr_SKR51")
  222. )
  223. return self.df_translate
  224. def prepare_translation(self, df_translate_import: pd.DataFrame):
  225. df_translate = df_translate_import[
  226. [
  227. "Konto_Nr_Händler",
  228. "Konto_Nr_SKR51",
  229. ]
  230. ].drop_duplicates()
  231. logging.info(df_translate.shape)
  232. row = {
  233. "Konto_Nr_Händler": "01-01-0861-00-00-00",
  234. "Konto_Nr_SKR51": "01-01-0861-00-00-00",
  235. }
  236. df_translate = pd.concat([df_translate, pd.DataFrame.from_records([row])])
  237. df_translate.set_index("Konto_Nr_Händler")
  238. return df_translate
  239. def special_translation(self, df: pd.DataFrame):
  240. df["Konto_Nr_Händler"] = df["Konto_Nr_Händler"].str.upper()
  241. df["Konto_Nr_SKR51"] = df["Konto_Nr_SKR51"].str.upper()
  242. df = self.extract_acct_info(df)
  243. df["Konto_Nr"] = df["Konto_Nr"].str.upper()
  244. logging.info(df.shape)
  245. logging.info(df.columns)
  246. logging.info(df.head())
  247. logging.info("df: " + str(df.shape))
  248. df["Bilanz"] = df["Konto_Nr"].str.match(r"^[013]")
  249. df["Kontoart"] = np.where(df["Bilanz"], "1", "2")
  250. df["Kontoart"] = np.where(df["Konto_Nr"].str.contains("_STK"), "3", df["Kontoart"])
  251. df["Kontoart"] = np.where(df["Konto_Nr"].str.match(r"^[9]"), "3", df["Kontoart"])
  252. df["Konto_1"] = df["Konto_Nr"].str.slice(0, 1)
  253. # fehlende Marken- und Standortzuordnung
  254. df["Marke"] = np.where(df["Marke"].isin(self.makes.keys()), df["Marke"], "99")
  255. df["Marke_Standort"] = df["Marke"] + "-" + df["Standort"]
  256. df["Standort"] = np.where(df["Marke_Standort"].isin(self.sites.keys()), df["Standort"], "01")
  257. df_debug = df.drop(columns=["Bilanz"])
  258. logging.info(df_debug.groupby(["Kontoart"]).aggregate("sum"))
  259. logging.info(df_debug.groupby(["Kontoart", "Konto_1"]).aggregate("sum"))
  260. logging.info(df_debug.groupby(["Konto_Nr"]).aggregate("sum"))
  261. df_debug.groupby(["Konto_Nr"]).aggregate("sum").to_csv(
  262. self.debug_file, decimal=",", sep=";", encoding="latin-1"
  263. )
  264. # Bereinigung GW-Kostenträger
  265. df["NW_Verkauf_1"] = (df["Konto_Nr"].str.match(r"^[78]0")) & (df["Kostenstelle"].str.match(r"^[^1]\d"))
  266. df["Kostenstelle"] = np.where(df["NW_Verkauf_1"] == True, "11", df["Kostenstelle"])
  267. df["Konto_7010"] = df["Konto_Nr"].str.match(r"^[78]01[01]")
  268. df["Kostenstelle"] = np.where(df["Konto_7010"] == True, "14", df["Kostenstelle"])
  269. df["GW_Verkauf_2"] = (df["Konto_Nr"].str.match(r"^[78]1")) & (df["Kostenstelle"].str.match(r"^[^2]\d"))
  270. df["Kostenstelle"] = np.where(df["GW_Verkauf_2"] == True, "21", df["Kostenstelle"])
  271. df["GW_Verkauf_3"] = (df["Konto_Nr"].str.match(r"^[78]3")) & (df["Kostenstelle"].str.match(r"^[^3]\d"))
  272. df["Kostenstelle"] = np.where(df["GW_Verkauf_3"] == True, "31", df["Kostenstelle"])
  273. df["GW_Verkauf_4"] = (df["Konto_Nr"].str.match(r"^[78]4")) & (df["Kostenstelle"].str.match(r"^[^4]\d"))
  274. df["Kostenstelle"] = np.where(df["GW_Verkauf_4"] == True, "41", df["Kostenstelle"])
  275. df["GW_Verkauf_x420"] = df["Konto_Nr"].str.match(r"^[78]420")
  276. df["Kostenstelle"] = np.where(df["GW_Verkauf_x420"] == True, "42", df["Kostenstelle"])
  277. df["GW_Verkauf_5"] = (df["Konto_Nr"].str.match(r"^[78]5")) & (df["Kostenstelle"].str.match(r"^[^5]\d"))
  278. df["Kostenstelle"] = np.where(df["GW_Verkauf_5"] == True, "51", df["Kostenstelle"])
  279. df["GW_Verkauf_50"] = (df["Konto_Nr"].str.match(r"^[78]")) & (df["Kostenstelle"].str.match(r"^2"))
  280. df["Kostenträger"] = np.where(df["GW_Verkauf_50"] == True, "52", df["Kostenträger"])
  281. df["Kostenträger"] = np.where(
  282. (df["GW_Verkauf_50"] == True) & (df["Marke"] == "01"),
  283. "50",
  284. df["Kostenträger"],
  285. )
  286. df["NW_Verkauf_00"] = (
  287. (df["Konto_Nr"].str.match(r"^[78]2"))
  288. & (df["Kostenstelle"].str.match(r"^1"))
  289. & (df["Kostenträger"].str.match(r"^[^01234]"))
  290. )
  291. df["Kostenträger"] = np.where(df["NW_Verkauf_00"] == True, "00", df["Kostenträger"])
  292. df["GW_Stk_50"] = (df["Konto_Nr"].str.match(r"^9130")) & (df["Kostenstelle"].str.match(r"^2"))
  293. df["Kostenträger"] = np.where(df["GW_Stk_50"] == True, "52", df["Kostenträger"])
  294. df["Kostenträger"] = np.where((df["GW_Stk_50"] == True) & (df["Marke"] == "01"), "50", df["Kostenträger"])
  295. df["Kostenträger"] = np.where(df["Bilanz"] == True, "00", df["Kostenträger"])
  296. df["Konto_5er"] = (df["Konto_Nr"].str.match("^5")) | (df["Konto_Nr"].str.match("^9143"))
  297. df["Absatzkanal"] = np.where(df["Konto_5er"] == True, "99", df["Absatzkanal"])
  298. df["Konto_5005"] = (df["Konto_Nr"].str.match("^5005")) & (df["Kostenstelle"].str.match(r"^[^12]"))
  299. df["Kostenstelle"] = np.where(df["Konto_5005"] == True, "20", df["Kostenstelle"])
  300. df["Kostenträger"] = np.where(df["Konto_5005"] == True, "50", df["Kostenträger"])
  301. df["Konto_5007"] = (df["Konto_Nr"].str.match("^5007")) & (df["Kostenstelle"].str.match(r"^([^4]|42)"))
  302. df["Kostenstelle"] = np.where(df["Konto_5007"] == True, "41", df["Kostenstelle"])
  303. df["Kostenträger"] = np.where(df["Konto_5007"] == True, "70", df["Kostenträger"])
  304. df["Konto_914er"] = (df["Konto_Nr"].str.match("^914[34]")) & (df["Kostenträger"].str.match(r"^[^7]"))
  305. df["Kostenträger"] = np.where(df["Konto_914er"] == True, "70", df["Kostenträger"])
  306. df["Teile_30_60"] = (
  307. (df["Konto_Nr"].str.match(r"^[578]"))
  308. & (df["Kostenstelle"].str.match(r"^[3]"))
  309. & (df["Kostenträger"].str.match(r"^[^6]"))
  310. )
  311. df["Kostenträger"] = np.where(df["Teile_30_60"] == True, "60", df["Kostenträger"])
  312. df["Service_40_70"] = (
  313. (df["Konto_Nr"].str.match(r"^[578]"))
  314. & (df["Kostenstelle"].str.match(r"^[4]"))
  315. & (df["Kostenträger"].str.match(r"^[^7]"))
  316. )
  317. df["Kostenträger"] = np.where(df["Service_40_70"] == True, "70", df["Kostenträger"])
  318. df["KRM"] = df["Marke"] + df["Standort"] + df["Kostenstelle"] + df["Absatzkanal"] + df["Kostenträger"]
  319. df["Konto_Nr_SKR51"] = (
  320. (df["Marke"] + "-" + df["Standort"] + "-" + df["Konto_Nr"])
  321. + "-"
  322. + (df["Kostenstelle"] + "-" + df["Absatzkanal"] + "-" + df["Kostenträger"])
  323. )
  324. df["IsNumeric"] = (
  325. (df["KRM"].str.isdigit())
  326. & (df["Konto_Nr"].str.isdigit())
  327. & (df["Konto_Nr"].str.len() == 4)
  328. # & (df["Konto_Nr_SKR51"].str.len() == 19)
  329. )
  330. df_invalid = df[df["IsNumeric"] == False]
  331. df_invalid.to_csv(self.export_invalid_filename, decimal=",", sep=";", encoding="latin-1", index=False)
  332. return df[df["IsNumeric"] == True][TRANSLATE]
  333. def load_bookings_from_file(self):
  334. df2 = []
  335. timestamps = []
  336. for csv_file in self.account_bookings:
  337. df2.append(
  338. pd.read_csv(
  339. csv_file,
  340. decimal=",",
  341. sep=";",
  342. encoding="latin-1",
  343. converters={0: str, 1: str},
  344. )
  345. )
  346. timestamps.append(Path(csv_file).stat().st_mtime)
  347. self.booking_date = datetime.fromtimestamp(max(timestamps))
  348. self.df_bookings = pd.concat(df2)
  349. self.df_bookings["amount"] = (self.df_bookings["Debit Amount"] + self.df_bookings["Credit Amount"]).round(2)
  350. def filter_bookings(self):
  351. if self.df_bookings is None:
  352. self.load_bookings_from_file()
  353. # Kontensalden auf gegebenen Monat filtern
  354. filter_from = self.current_year + self.first_month_of_financial_year
  355. filter_prev = self.last_year + self.first_month_of_financial_year
  356. if self.first_month_of_financial_year > self.current_month:
  357. filter_from = self.last_year + self.first_month_of_financial_year
  358. filter_prev = self.last_year2 + self.first_month_of_financial_year
  359. filter_to = self.current_year + self.current_month
  360. filter_opening = self.current_year + "00"
  361. filter_prev_opening = self.last_year + "00"
  362. prev_year_closed = True
  363. df_opening_balance = self.df_bookings[(self.df_bookings["Bookkeep Period"] == filter_opening)]
  364. if df_opening_balance.shape[0] == 0:
  365. df_opening_balance = self.df_bookings[
  366. (self.df_bookings["Bookkeep Period"] == filter_prev_opening)
  367. | (
  368. (self.df_bookings["Bookkeep Period"] >= filter_prev)
  369. & (self.df_bookings["Bookkeep Period"] < filter_from)
  370. )
  371. ].copy()
  372. df_opening_balance["Bookkeep Period"] = filter_opening
  373. prev_year_closed = False
  374. df_opening_balance = df_opening_balance[(df_opening_balance["Konto_Nr_Händler"].str.contains(r"-[013]\d\d+-"))]
  375. opening_balance = df_opening_balance["amount"].aggregate("sum").round(2)
  376. logging.info("Gewinn/Verlustvortrag")
  377. logging.info(opening_balance)
  378. if not prev_year_closed:
  379. row = {
  380. "Konto_Nr_Händler": "01-01-0861-00-00-00",
  381. "Bookkeep Period": filter_opening,
  382. "Debit Amount": opening_balance * -1,
  383. "Credit Amount": 0,
  384. "Debit Quantity": 0,
  385. "Credit Quantity": 0,
  386. "amount": opening_balance * -1,
  387. }
  388. df_opening_balance = pd.concat([df_opening_balance, pd.DataFrame.from_records([row])])
  389. df_filtered = self.df_bookings[
  390. (self.df_bookings["Bookkeep Period"] >= filter_from) & (self.df_bookings["Bookkeep Period"] <= filter_to)
  391. ]
  392. # Buchungen kopieren und als Statistikkonten anhängen
  393. df_stats = df_filtered.copy()
  394. # df_stats = df_stats[df_stats['Konto_Nr_Händler'].str.match(r'-[24578]\d\d\d-')]
  395. df_stats["Konto_Nr_Händler"] = df_stats["Konto_Nr_Händler"].str.replace(r"-(\d\d\d+)-", r"-\1_STK-", regex=True)
  396. df_stats["amount"] = (df_filtered["Debit Quantity"] + df_filtered["Credit Quantity"]).round(2)
  397. df_combined = pd.concat([df_opening_balance, df_filtered, df_stats])
  398. # Spalten konvertieren
  399. df_combined["period"] = df_combined["Bookkeep Period"].apply(lambda x: self.bookkeep_filter[x])
  400. return df_combined[df_combined["amount"] != 0.00]
  401. @property
  402. def export_filename(self):
  403. return self.export_filename_for_period(self.current_year, self.current_month)
  404. @property
  405. def export_info_dir(self):
  406. return f"{self.base_dir}/Export/{self.current_year}/info/"
  407. @property
  408. def export_invalid_filename(self):
  409. return f"{self.base_dir}/Export/ungueltig.csv"
  410. def export_filename_for_period(self, year, month):
  411. return f"{self.base_dir}/Export/{year}/export_{year}-{month}.xml"
  412. def export_skr51_xml(self, records, bk_filter, period_no, makes_used, sites_used, main_site, filename):
  413. record_elements = ACCOUNT_INFO + ["Decimals"] + list(bk_filter.values())[:period_no] + ["CumulatedYear"]
  414. root = ET.Element("HbvData")
  415. h = ET.SubElement(root, "Header")
  416. for k, v in self.header(makes_used, sites_used, main_site).items():
  417. ET.SubElement(h, k).text = str(v)
  418. make_list = ET.SubElement(root, "MakeList")
  419. for make in makes_used:
  420. if make not in self.makes:
  421. continue
  422. e = ET.SubElement(make_list, "MakeListEntry")
  423. ET.SubElement(e, "Make").text = make
  424. ET.SubElement(e, "MakeCode").text = self.makes[make]
  425. bm_code_list = ET.SubElement(root, "BmCodeList")
  426. for s in sites_used:
  427. make, site = s.split("-")
  428. if s not in self.sites:
  429. continue
  430. e = ET.SubElement(bm_code_list, "BmCodeEntry")
  431. ET.SubElement(e, "Make").text = make
  432. ET.SubElement(e, "Site").text = site
  433. ET.SubElement(e, "BmCode").text = self.sites[s]
  434. record_list = ET.SubElement(root, "RecordList")
  435. for row in records:
  436. record = ET.SubElement(record_list, "Record")
  437. for e in record_elements:
  438. child = ET.SubElement(record, e)
  439. field = row.get(e, 0.0)
  440. if str(field) == "nan":
  441. field = "0"
  442. elif type(field) is float:
  443. field = "{:.0f}".format(field * 100)
  444. child.text = str(field)
  445. with open(filename, "w", encoding="utf-8") as fwh:
  446. fwh.write(minidom.parseString(ET.tostring(root)).toprettyxml(indent=" "))
  447. def convert_to_row(self, node):
  448. return [child.text for child in node]
  449. def convert_xml_to_csv(self, xmlfile, csvfile):
  450. record_list = ET.parse(xmlfile).getroot().find("RecordList")
  451. header = [child.tag for child in record_list.find("Record")]
  452. bookings = [self.convert_to_row(node) for node in record_list.findall("Record")]
  453. with open(csvfile, "w") as fwh:
  454. cwh = csv.writer(fwh, delimiter=";")
  455. cwh.writerow(header)
  456. cwh.writerows(bookings)
  457. return True
  458. def convert_csv_to_xml(self, csvfile, xmlfile):
  459. self.makes = {"01": "1844"}
  460. self.sites = {"01-01": "1844"}
  461. with open(csvfile, "r", encoding="latin-1") as frh:
  462. csv_reader = csv.DictReader(frh, delimiter=";")
  463. self.export_skr51_xml(csv_reader, self.bookkeep_filter(), 1, list(self.sites.values())[0], xmlfile)
  464. def gchr_local():
  465. base_dir = os.getcwd() + "/../GCHR2_Testdaten/Kunden"
  466. for path in Path(base_dir).glob("*"):
  467. if path.is_dir():
  468. print(path.name)
  469. gchr_export(str(path))
  470. def gchr_export(base_dir):
  471. gchr = GCHR(base_dir)
  472. # gchr.export_all_periods(overwrite=True, today="2022-08-01")
  473. gchr.export_all_periods()
  474. if __name__ == "__main__":
  475. gchr_local()
  476. # import cProfile
  477. # cProfile.run(
  478. # "gchr_local()",
  479. # "gchr_local.prof",
  480. # )