gchr.py 22 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504
  1. import pandas as pd
  2. import numpy as np
  3. import xml.etree.ElementTree as ET
  4. import csv
  5. from xml.dom import minidom
  6. from datetime import datetime
  7. import logging
  8. from pathlib import Path
  9. import os
# Column names that identify one account record in the HBV export.
# Used both as the groupby key in export_period and as the leading
# XML tag order in export_xml.
ACCOUNT_INFO = [
    "Account",
    "Make",
    "Site",
    "Origin",
    "SalesChannel",
    "CostCarrier",
    "CostAccountingString",
]
class GCHR:
    """Builds the monthly HBV accounting export (XML) from dealer booking CSVs."""

    # Timestamp of the newest bookings CSV; set by load_bookings_from_file().
    booking_date: datetime

    def __init__(self, base_dir) -> None:
        """Collect input file paths below *base_dir* and set pandas display options."""
        self.base_dir = base_dir
        # Translation table: dealer account number -> SKR51 account number
        self.account_translation = f"{self.base_dir}/data/Kontenrahmen_uebersetzt.csv"
        # All balance exports found in the data directory
        self.account_bookings = list(Path(self.base_dir).joinpath("data").glob("GuV_Bilanz_Salden*.csv"))
        # "01" = financial year equals calendar year
        self.first_month_of_financial_year = "01"
        pd.set_option("display.max_rows", 500)
        pd.set_option("display.float_format", lambda x: "%.2f" % x)
    def set_bookkeep_period(self, year, month):
        """Select the bookkeeping period and (re)configure per-period output files.

        year, month: zero-padded strings, e.g. "2022", "05".
        Side effects: reconfigures the root logger to write a per-period
        protocol file and derives the debug/ignored/invalid CSV paths.
        """
        self.current_year = year
        self.current_month = month
        period = f"{year}-{month}"
        prot_file = f"{self.export_info_dir}/protokoll_{period}.log"
        # force=True replaces handlers left over from a previous period's export
        logging.basicConfig(
            filename=prot_file,
            filemode="w",
            encoding="utf-8",
            level=logging.DEBUG,
            force=True,
        )
        self.debug_file = f"{self.export_info_dir}/debug_{period}.csv"
        self.account_ignored = f"{self.export_info_dir}/ignoriert_{period}.csv"
        self.account_invalid = f"{self.export_info_dir}/ungueltig_{period}.csv"
        # Neighbouring years as strings, needed for fiscal-year period filters
        self.last_year = str(int(self.current_year) - 1)
        self.last_year2 = str(int(self.current_year) - 2)
        self.next_year = str(int(self.current_year) + 1)
  46. def header(self, makes, sites):
  47. return {
  48. "Country": "DE",
  49. "MainBmCode": sites[0]["Standort_HBV"],
  50. "Month": self.current_month,
  51. "Year": self.current_year,
  52. "Currency": "EUR",
  53. "NumberOfMakes": len(makes),
  54. "NumberOfSites": len(sites),
  55. "ExtractionDate": self.booking_date.strftime("%d.%m.%Y"),
  56. "ExtractionTime": self.booking_date.strftime("%H:%M:%S"),
  57. "BeginFiscalYear": self.first_month_of_financial_year,
  58. }
    @property
    def bookkeep_filter(self):
        """Map raw period keys ("YYYYMM", plus "YYYY00" for the opening
        balance) to export column names ("OpeningBalance", "Period01".."Period12").

        NOTE(review): for a financial year not starting in January the slice
        below yields 13 monthly keys plus the opening key, but only 13 target
        names exist, so zip() silently drops the last month — confirm this is
        intended (the dropped month lies beyond the export cut-off, so it
        appears harmless in practice).
        """
        period = [self.current_year + str(i).zfill(2) for i in range(1, 13)]
        if self.first_month_of_financial_year != "01":
            # Fiscal year spans two calendar years: widen, then cut out the window
            if self.first_month_of_financial_year > self.current_month:
                period = [self.last_year + str(i).zfill(2) for i in range(1, 13)] + period
            else:
                period = period + [self.next_year + str(i).zfill(2) for i in range(1, 13)]
            fm = int(self.first_month_of_financial_year)
            period = period[fm - 1 : fm + 12]
        # "YYYY00" is the synthetic opening-balance period
        period = [self.current_year + "00"] + period
        rename_to = ["OpeningBalance"] + ["Period" + str(i).zfill(2) for i in range(1, 13)]
        return dict(zip(period, rename_to))
  72. def extract_acct_info(self, df: pd.DataFrame):
  73. acct_info = [
  74. "Marke",
  75. "Standort",
  76. "Konto_Nr",
  77. "Kostenstelle",
  78. "Absatzkanal",
  79. "Kostenträger",
  80. ]
  81. df["Konto_Nr_SKR51"] = df.index
  82. df[acct_info] = df["Konto_Nr_SKR51"].str.split(pat="-", n=6, expand=True)
  83. return df
    def export_all_periods(self, overwrite=False, today=None):
        """Export the 12 months preceding *today* (ISO date string; default: now).

        Existing export files are skipped unless *overwrite* is True.
        Creates the per-year "info" directory before each export.
        """
        dt = datetime.now()
        if today is not None:
            dt = datetime.fromisoformat(today)
        prev = str(dt.year - 1)
        # Trailing twelve months: (prev year, current month..12) + (this year, 01..current month - 1)
        periods = [(prev, str(x).zfill(2)) for x in range(dt.month, 13)] + [
            (str(dt.year), str(x).zfill(2)) for x in range(1, dt.month)
        ]
        for year, month in periods:
            filename = self.export_filename_for_period(year, month)
            if overwrite or not Path(filename).exists():
                os.makedirs(Path(filename).parent.joinpath("info"), exist_ok=True)
                self.export_period(year, month)
  97. def export_period(self, year, month):
  98. self.set_bookkeep_period(year, month)
  99. # Übersetzungstabelle laden
  100. df_translate_import = pd.read_csv(
  101. self.account_translation,
  102. decimal=",",
  103. sep=";",
  104. encoding="latin-1",
  105. converters={i: str for i in range(0, 200)},
  106. )
  107. df_translate = self.prepare_translation(df_translate_import)
  108. # Kontensalden laden
  109. df_bookings = self.load_bookings_from_file()
  110. all_periods = set(df_bookings["Bookkeep Period"].to_list())
  111. bookkeep_period_date = datetime(int(year), int(month), 28)
  112. if df_bookings.shape[0] == 0 or len(all_periods) <= 1 or self.booking_date < bookkeep_period_date:
  113. logging.error("ABBRUCH!!! Keine Daten vorhanden!")
  114. return False
  115. filter_to = self.current_year + self.current_month
  116. period_no = list(self.bookkeep_filter.keys()).index(filter_to) + 1
  117. logging.info("df_bookings: " + str(df_bookings.shape))
  118. # Join auf Übersetzung
  119. df_combined = df_bookings.merge(df_translate, how="inner", on="Konto_Nr_Händler")
  120. logging.info(f"df_combined: {df_combined.shape}")
  121. # Hack für fehlende Markenzuordnung
  122. df_combined["Fremdmarke"] = df_combined["Marke_HBV"].str.match(r"^0000")
  123. df_combined["Marke"] = np.where(df_combined["Fremdmarke"], "99", df_combined["Marke"])
  124. df_combined["Standort_egal"] = df_combined["Standort_HBV"].str.match(r"^\d\d_")
  125. df_combined["Standort_HBV"] = np.where(
  126. df_combined["Fremdmarke"] | df_combined["Standort_egal"],
  127. "0000",
  128. df_combined["Standort_HBV"],
  129. )
  130. makes = df_combined[["Marke", "Marke_HBV"]].drop_duplicates().sort_values(by=["Marke"])
  131. sites = (
  132. df_combined[["Marke", "Standort", "Standort_HBV"]].drop_duplicates().sort_values(by=["Marke", "Standort"])
  133. )
  134. # df_combined.to_csv(account_invalid, decimal=',', sep=';', encoding='latin-1', index=False)
  135. # Gruppieren
  136. # df_grouped = df_combined.groupby(['Konto_Nr_SKR51', 'period']).sum()
  137. df_pivot = df_combined.pivot_table(
  138. index=["Konto_Nr_SKR51"],
  139. columns=["period"],
  140. values="amount",
  141. aggfunc="sum",
  142. margins=True,
  143. margins_name="CumulatedYear",
  144. )
  145. df_pivot.drop(index="CumulatedYear", inplace=True)
  146. logging.info("df_pivot: " + str(df_pivot.shape))
  147. df = self.special_translation(df_pivot, makes)
  148. from_label = ["Marke", "Standort", "Konto_Nr", "Kostenstelle", "Absatzkanal", "Kostenträger", "KRM"]
  149. to_label = ["Make", "Site", "Account", "Origin", "SalesChannel", "CostCarrier", "CostAccountingString"]
  150. col_dict = dict(zip(from_label, to_label))
  151. df = df.rename(columns=col_dict)
  152. makes = makes.rename(columns=col_dict).to_dict(orient="records")
  153. sites = sites.rename(columns=col_dict).to_dict(orient="records")
  154. df_invalid = df[df["IsNumeric"] == False]
  155. df_invalid.to_csv(self.account_invalid, decimal=",", sep=";", encoding="latin-1", index=False)
  156. export_csv = self.export_filename[:-4] + ".csv"
  157. df.to_csv(export_csv, decimal=",", sep=";", encoding="latin-1", index=False)
  158. df = df[df["IsNumeric"] != False].groupby(ACCOUNT_INFO, as_index=False).aggregate("sum")
  159. # Infos ergänzen
  160. df["Decimals"] = 2
  161. # df['OpeningBalance'] = 0.0
  162. logging.info(df.shape)
  163. self.export_xml(df.to_dict(orient="records"), self.bookkeep_filter, period_no, makes, sites)
  164. # Join auf Übersetzung - nicht zugeordnet
  165. df_ignored = df_bookings.merge(df_translate, how="left", on="Konto_Nr_Händler")
  166. df_ignored = df_ignored[
  167. df_ignored["Konto_Nr_SKR51"].isna()
  168. ] # [['Konto_Nr_Händler', 'Bookkeep Period', 'amount', 'quantity']]
  169. if not df_ignored.empty:
  170. df_ignored = df_ignored.pivot_table(
  171. index=["Konto_Nr_Händler"],
  172. columns=["period"],
  173. values="amount",
  174. aggfunc=np.sum,
  175. margins=True,
  176. margins_name="CumulatedYear",
  177. )
  178. df_ignored.to_csv(self.account_ignored, decimal=",", sep=";", encoding="latin-1")
  179. return self.export_filename
  180. def prepare_translation(self, df_translate: pd.DataFrame):
  181. logging.info(df_translate.shape)
  182. df_translate["duplicated"] = df_translate.duplicated()
  183. logging.info(df_translate[df_translate["duplicated"]])
  184. df_translate = df_translate[
  185. [
  186. "Konto_Nr_Händler",
  187. "Konto_Nr_SKR51",
  188. "Marke",
  189. "Marke_HBV",
  190. "Standort",
  191. "Standort_HBV",
  192. ]
  193. ]
  194. row = (
  195. df_translate[["Marke", "Marke_HBV", "Standort", "Standort_HBV"]]
  196. .drop_duplicates()
  197. .sort_values(by=["Marke", "Standort"])
  198. .iloc[:1]
  199. .to_dict(orient="records")[0]
  200. )
  201. row["Konto_Nr_Händler"] = "01-01-0861-00-00-00"
  202. row["Konto_Nr_SKR51"] = "01-01-0861-00-00-00"
  203. df_translate = pd.concat([df_translate, pd.DataFrame.from_records([row])])
  204. # print(df_translate.tail())
  205. # df_translate.drop(columns=['duplicated'], inplace=True)
  206. df_translate.drop_duplicates(inplace=True)
  207. df_translate.set_index("Konto_Nr_Händler")
  208. return df_translate
    def special_translation(self, df: pd.DataFrame, makes: pd.DataFrame):
        """Apply the SKR51 business rules: classify accounts and force the
        cost centre / cost unit / sales channel digits into the ranges the
        HBV import expects for each account group.

        df: pivot table indexed by "Konto_Nr_SKR51" (one column per period).
        makes: unique (Marke, Marke_HBV) pairs from the combined bookings.
        Returns *df* with the split key columns, the helper flag columns,
        the combined "KRM" key and an "IsNumeric" validity flag added.
        The rules below are strictly order-dependent: each np.where reads
        columns possibly rewritten by an earlier rule.
        """
        df = self.extract_acct_info(df)
        logging.info(df.shape)
        logging.info(df.columns)
        logging.info(df.head())
        logging.info("df: " + str(df.shape))
        # Account type: 1 = balance sheet (leading 0/1/3), 2 = P&L, 3 = statistics
        df["Bilanz"] = df["Konto_Nr"].str.match(r"^[013]")
        df["Kontoart"] = np.where(df["Bilanz"], "1", "2")
        df["Kontoart"] = np.where(df["Konto_Nr"].str.contains("_STK"), "3", df["Kontoart"])
        df["Kontoart"] = np.where(df["Konto_Nr"].str.match(r"^[9]"), "3", df["Kontoart"])
        df["Konto_1"] = df["Konto_Nr"].str.slice(0, 1)
        # Workaround for missing make assignment: unknown makes become "99"
        df = df.merge(makes, how="left", on="Marke")
        df["Marke"] = np.where(df["Marke_HBV"].isna(), "99", df["Marke"])
        df_debug = df.drop(columns=["Bilanz"])
        logging.info(df_debug.groupby(["Kontoart"]).aggregate("sum"))
        logging.info(df_debug.groupby(["Kontoart", "Konto_1"]).aggregate("sum"))
        logging.info(df_debug.groupby(["Konto_Nr"]).aggregate("sum"))
        df_debug.groupby(["Konto_Nr"]).aggregate("sum").to_csv(
            self.debug_file, decimal=",", sep=";", encoding="latin-1"
        )
        # Clean-up of vehicle-sales cost centres: accounts 7x/8x get the cost
        # centre matching their account group when it is out of range
        df["NW_Verkauf_1"] = (df["Konto_Nr"].str.match(r"^[78]0")) & (df["Kostenstelle"].str.match(r"^[^1]\d"))
        df["Kostenstelle"] = np.where(df["NW_Verkauf_1"] == True, "11", df["Kostenstelle"])
        df["Konto_7010"] = df["Konto_Nr"].str.match(r"^[78]01[01]")
        df["Kostenstelle"] = np.where(df["Konto_7010"] == True, "14", df["Kostenstelle"])
        df["GW_Verkauf_2"] = (df["Konto_Nr"].str.match(r"^[78]1")) & (df["Kostenstelle"].str.match(r"^[^2]\d"))
        df["Kostenstelle"] = np.where(df["GW_Verkauf_2"] == True, "21", df["Kostenstelle"])
        df["GW_Verkauf_3"] = (df["Konto_Nr"].str.match(r"^[78]3")) & (df["Kostenstelle"].str.match(r"^[^3]\d"))
        df["Kostenstelle"] = np.where(df["GW_Verkauf_3"] == True, "31", df["Kostenstelle"])
        df["GW_Verkauf_4"] = (df["Konto_Nr"].str.match(r"^[78]4")) & (df["Kostenstelle"].str.match(r"^[^4]\d"))
        df["Kostenstelle"] = np.where(df["GW_Verkauf_4"] == True, "41", df["Kostenstelle"])
        df["GW_Verkauf_x420"] = df["Konto_Nr"].str.match(r"^[78]420")
        df["Kostenstelle"] = np.where(df["GW_Verkauf_x420"] == True, "42", df["Kostenstelle"])
        df["GW_Verkauf_5"] = (df["Konto_Nr"].str.match(r"^[78]5")) & (df["Kostenstelle"].str.match(r"^[^5]\d"))
        df["Kostenstelle"] = np.where(df["GW_Verkauf_5"] == True, "51", df["Kostenstelle"])
        # Used-car sales (cost centre 2x): cost unit 52, or 50 for make "01"
        df["GW_Verkauf_50"] = (df["Konto_Nr"].str.match(r"^[78]")) & (df["Kostenstelle"].str.match(r"^2"))
        df["Kostenträger"] = np.where(df["GW_Verkauf_50"] == True, "52", df["Kostenträger"])
        df["Kostenträger"] = np.where(
            (df["GW_Verkauf_50"] == True) & (df["Marke"] == "01"),
            "50",
            df["Kostenträger"],
        )
        df["NW_Verkauf_00"] = (
            (df["Konto_Nr"].str.match(r"^[78]2"))
            & (df["Kostenstelle"].str.match(r"^1"))
            & (df["Kostenträger"].str.match(r"^[^01234]"))
        )
        df["Kostenträger"] = np.where(df["NW_Verkauf_00"] == True, "00", df["Kostenträger"])
        # Same used-car rule for the 9130 statistics accounts
        df["GW_Stk_50"] = (df["Konto_Nr"].str.match(r"^9130")) & (df["Kostenstelle"].str.match(r"^2"))
        df["Kostenträger"] = np.where(df["GW_Stk_50"] == True, "52", df["Kostenträger"])
        df["Kostenträger"] = np.where((df["GW_Stk_50"] == True) & (df["Marke"] == "01"), "50", df["Kostenträger"])
        # Balance-sheet accounts always carry cost unit "00"
        df["Kostenträger"] = np.where(df["Bilanz"] == True, "00", df["Kostenträger"])
        # 5xxx accounts (and 9143): sales channel forced to "99"
        df["Konto_5er"] = (df["Konto_Nr"].str.match("^5")) | (df["Konto_Nr"].str.match("^9143"))
        df["Absatzkanal"] = np.where(df["Konto_5er"] == True, "99", df["Absatzkanal"])
        df["Konto_5005"] = (df["Konto_Nr"].str.match("^5005")) & (df["Kostenstelle"].str.match(r"^[^12]"))
        df["Kostenstelle"] = np.where(df["Konto_5005"] == True, "20", df["Kostenstelle"])
        df["Kostenträger"] = np.where(df["Konto_5005"] == True, "50", df["Kostenträger"])
        df["Konto_5007"] = (df["Konto_Nr"].str.match("^5007")) & (df["Kostenstelle"].str.match(r"^([^4]|42)"))
        df["Kostenstelle"] = np.where(df["Konto_5007"] == True, "41", df["Kostenstelle"])
        df["Kostenträger"] = np.where(df["Konto_5007"] == True, "70", df["Kostenträger"])
        df["Konto_914er"] = (df["Konto_Nr"].str.match("^914[34]")) & (df["Kostenträger"].str.match(r"^[^7]"))
        df["Kostenträger"] = np.where(df["Konto_914er"] == True, "70", df["Kostenträger"])
        # Parts (cost centre 3x) -> cost unit 60; service (4x) -> cost unit 70
        df["Teile_30_60"] = (
            (df["Konto_Nr"].str.match(r"^[578]"))
            & (df["Kostenstelle"].str.match(r"^[3]"))
            & (df["Kostenträger"].str.match(r"^[^6]"))
        )
        df["Kostenträger"] = np.where(df["Teile_30_60"] == True, "60", df["Kostenträger"])
        df["Service_40_70"] = (
            (df["Konto_Nr"].str.match(r"^[578]"))
            & (df["Kostenstelle"].str.match(r"^[4]"))
            & (df["Kostenträger"].str.match(r"^[^7]"))
        )
        df["Kostenträger"] = np.where(df["Service_40_70"] == True, "70", df["Kostenträger"])
        # Combined cost-accounting key; valid records are all-digit with a 4-digit account
        df["KRM"] = df["Marke"] + df["Standort"] + df["Kostenstelle"] + df["Absatzkanal"] + df["Kostenträger"]
        df["IsNumeric"] = (df["KRM"].str.isdigit()) & (df["Konto_Nr"].str.isdigit()) & (df["Konto_Nr"].str.len() == 4)
        return df
    def load_bookings_from_file(self):
        """Read all balance CSVs, construct the opening balance, append the
        statistics ("_STK") pseudo accounts, and return the non-zero rows
        of the selected financial year.

        Side effect: sets ``self.booking_date`` to the newest file's mtime.
        Requires set_bookkeep_period() to have been called first.
        """
        df2 = []
        timestamps = []
        for csv_file in self.account_bookings:
            df2.append(
                pd.read_csv(
                    csv_file,
                    decimal=",",
                    sep=";",
                    encoding="latin-1",
                    converters={0: str, 1: str},
                )
            )
            timestamps.append(Path(csv_file).stat().st_mtime)
        df_bookings = pd.concat(df2)
        self.booking_date = datetime.fromtimestamp(max(timestamps))
        # Restrict balances to the current financial year
        filter_from = self.current_year + self.first_month_of_financial_year
        filter_prev = self.last_year + self.first_month_of_financial_year
        if self.first_month_of_financial_year > self.current_month:
            # Financial year started in the previous calendar year
            filter_from = self.last_year + self.first_month_of_financial_year
            filter_prev = self.last_year2 + self.first_month_of_financial_year
        filter_to = self.current_year + self.current_month
        filter_opening = self.current_year + "00"
        filter_prev_opening = self.last_year + "00"
        prev_year_closed = True
        df_opening_balance = df_bookings[(df_bookings["Bookkeep Period"] == filter_opening)]
        if df_opening_balance.shape[0] == 0:
            # No opening balance booked yet: derive it from last year's opening
            # balance plus last year's movements
            df_opening_balance = df_bookings[
                (df_bookings["Bookkeep Period"] == filter_prev_opening)
                | ((df_bookings["Bookkeep Period"] >= filter_prev) & (df_bookings["Bookkeep Period"] < filter_from))
            ].copy()
            df_opening_balance["Bookkeep Period"] = filter_opening
            prev_year_closed = False
        # Only balance-sheet accounts (0xx/1xx/3xx) carry an opening balance
        df_opening_balance = df_opening_balance[(df_opening_balance["Konto_Nr_Händler"].str.contains(r"-[013]\d\d+-"))]
        df_opening_balance["amount"] = (df_opening_balance["Debit Amount"] + df_opening_balance["Credit Amount"]).round(
            2
        )
        opening_balance = df_opening_balance["amount"].aggregate("sum").round(2)
        logging.info("Gewinn/Verlustvortrag")  # profit/loss carried forward
        logging.info(opening_balance)
        if not prev_year_closed:
            # Book the carryforward against the synthetic account 0861
            row = {
                "Konto_Nr_Händler": "01-01-0861-00-00-00",
                "Bookkeep Period": filter_opening,
                "Debit Amount": opening_balance * -1,
                "Credit Amount": 0,
                "Debit Quantity": 0,
                "Credit Quantity": 0,
                "amount": opening_balance * -1,
            }
            df_opening_balance = pd.concat([df_opening_balance, pd.DataFrame.from_records([row])])
        df_bookings = df_bookings[
            (df_bookings["Bookkeep Period"] >= filter_from) & (df_bookings["Bookkeep Period"] <= filter_to)
        ]
        df_bookings["amount"] = (df_bookings["Debit Amount"] + df_bookings["Credit Amount"]).round(2)
        # Copy the bookings as statistics accounts carrying quantities instead of amounts
        df_stats = df_bookings.copy()
        df_stats["Konto_Nr_Händler"] = df_stats["Konto_Nr_Händler"].str.replace(r"-(\d\d\d+)-", r"-\1_STK-", regex=True)
        df_stats["amount"] = (df_bookings["Debit Quantity"] + df_bookings["Credit Quantity"]).round(2)
        df_bookings = pd.concat([df_opening_balance, df_bookings, df_stats])
        # Map raw period keys to export column names
        df_bookings["period"] = df_bookings["Bookkeep Period"].apply(lambda x: self.bookkeep_filter[x])
        return df_bookings[df_bookings["amount"] != 0.00]
    @property
    def export_filename(self):
        # Export XML path for the currently selected bookkeeping period
        return self.export_filename_for_period(self.current_year, self.current_month)
    @property
    def export_info_dir(self):
        # Directory for protocol/debug/ignored/invalid files of the current export year
        return f"{self.base_dir}/Export/{self.current_year}/info/"
  363. def export_filename_for_period(self, year, month):
  364. return f"{self.base_dir}/Export/{year}/export_{year}-{month}.xml"
    def export_xml(self, records, bk_filter, period_no, makes, sites):
        """Serialize header, make list, site list and account records into the
        pretty-printed UTF-8 XML file at ``self.export_filename``.

        records: list of dicts (one per account record).
        bk_filter: period -> column-name mapping (see bookkeep_filter).
        period_no: number of period columns to emit (up to the current month).
        """
        # Only the periods up to the current one are exported
        record_elements = ACCOUNT_INFO + ["Decimals"] + list(bk_filter.values())[:period_no] + ["CumulatedYear"]
        root = ET.Element("HbvData")
        h = ET.SubElement(root, "Header")
        for k, v in self.header(makes, sites).items():
            ET.SubElement(h, k).text = str(v)
        make_list = ET.SubElement(root, "MakeList")
        for m in makes:
            e = ET.SubElement(make_list, "MakeListEntry")
            ET.SubElement(e, "Make").text = m["Make"]
            ET.SubElement(e, "MakeCode").text = m["Marke_HBV"]
        bm_code_list = ET.SubElement(root, "BmCodeList")
        for s in sites:
            e = ET.SubElement(bm_code_list, "BmCodeEntry")
            ET.SubElement(e, "Make").text = s["Make"]
            ET.SubElement(e, "Site").text = s["Site"]
            ET.SubElement(e, "BmCode").text = s["Standort_HBV"]
        record_list = ET.SubElement(root, "RecordList")
        for row in records:
            record = ET.SubElement(record_list, "Record")
            for e in record_elements:
                child = ET.SubElement(record, e)
                field = row.get(e, 0.0)
                if str(field) == "nan":
                    field = "0"
                elif type(field) is float:
                    # Amounts are exported as integer cents
                    field = "{:.0f}".format(field * 100)
                child.text = str(field)
        with open(self.export_filename, "w", encoding="utf-8") as fwh:
            fwh.write(minidom.parseString(ET.tostring(root)).toprettyxml(indent=" "))
  395. def convert_to_row(self, node):
  396. return [child.text for child in node]
  397. def convert_xml_to_csv(self, xmlfile, csvfile):
  398. record_list = ET.parse(xmlfile).getroot().find("RecordList")
  399. header = [child.tag for child in record_list.find("Record")]
  400. bookings = [self.convert_to_row(node) for node in record_list.findall("Record")]
  401. with open(csvfile, "w") as fwh:
  402. cwh = csv.writer(fwh, delimiter=";")
  403. cwh.writerow(header)
  404. cwh.writerows(bookings)
  405. return True
  406. def convert_csv_to_xml(self, csvfile, xmlfile):
  407. makes = [{"Make": "01", "Marke_HBV": "1844"}]
  408. sites = [{"Make": "01", "Site": "01", "Marke_HBV": "1844"}]
  409. with open(csvfile, "r", encoding="latin-1") as frh:
  410. csv_reader = csv.DictReader(frh, delimiter=";")
  411. self.export_xml(csv_reader, self.bookkeep_filter(), 1, makes, sites, xmlfile)
  412. def gchr_local(base_dir):
  413. for path in Path(base_dir).glob("*"):
  414. if path.is_dir():
  415. print(path.name)
  416. gchr_export(str(path))
  417. def gchr_export(base_dir):
  418. gchr = GCHR(base_dir)
  419. gchr.export_all_periods(overwrite=True, today="2022-08-01")
if __name__ == "__main__":
    # Test-data root expected next to the working directory
    base_dir = os.getcwd() + "/../GCHR2_Testdaten/Kunden"
    if Path(base_dir).exists():
        gchr_local(base_dir)