|
@@ -1,11 +1,14 @@
|
|
-from datetime import datetime
|
|
|
|
import calendar
|
|
import calendar
|
|
import csv
|
|
import csv
|
|
import hashlib
|
|
import hashlib
|
|
-from typing import Any, Generator, Literal
|
|
|
|
-import pyodbc
|
|
|
|
|
|
+import os
|
|
|
|
+import re
|
|
|
|
+import shutil
|
|
|
|
+from datetime import datetime, timedelta
|
|
from pathlib import Path
|
|
from pathlib import Path
|
|
|
|
+from typing import Any, Generator, Literal
|
|
|
|
|
|
|
|
+import pyodbc
|
|
|
|
|
|
# ODBC connection string for the GC_OPTIMA_64 data source.
# NOTE(review): credentials are hard-coded in source -- consider moving the
# uid/pwd into an environment variable or a config file kept out of VCS.
DSN = "dsn=GC_OPTIMA_64;uid=gaps;pwd=Gcbs12ma"
|
|
|
|
|
|
@@ -13,7 +16,7 @@ DSN = "dsn=GC_OPTIMA_64;uid=gaps;pwd=Gcbs12ma"
|
|
class DatevConfig:
|
|
class DatevConfig:
|
|
base_dir: str = str(Path(__file__).resolve().parent)
|
|
base_dir: str = str(Path(__file__).resolve().parent)
|
|
data_path: str = base_dir + "/data"
|
|
data_path: str = base_dir + "/data"
|
|
- export_path: str = base_dir + "/export"
|
|
|
|
|
|
+ export_path: str = base_dir + "/export/temp"
|
|
translation_file: str = data_path + "/uebersetzungstabelle.csv"
|
|
translation_file: str = data_path + "/uebersetzungstabelle.csv"
|
|
csv_date: datetime = datetime.now() # datetime(2023, 11, 20, 19, 2, 28, 714000)
|
|
csv_date: datetime = datetime.now() # datetime(2023, 11, 20, 19, 2, 28, 714000)
|
|
geschaeftsjahr_monat: int = 1
|
|
geschaeftsjahr_monat: int = 1
|
|
@@ -280,9 +283,15 @@ def export_all_periods() -> None:
|
|
prev = str(dt.year - 1)
|
|
prev = str(dt.year - 1)
|
|
periods = [f"{prev}{x:02}" for x in range(1, 13)] + [f"{dt.year}{x:02}" for x in range(1, dt.month + 1)]
|
|
periods = [f"{prev}{x:02}" for x in range(1, 13)] + [f"{dt.year}{x:02}" for x in range(1, dt.month + 1)]
|
|
|
|
|
|
- for year, month in periods:
|
|
|
|
- period = f"{year}{month}"
|
|
|
|
- export_extf(period, "db")
|
|
|
|
|
|
+ for p in periods:
|
|
|
|
+ export_extf(p, "db")
|
|
|
|
+
|
|
|
|
+
|
|
|
|
def extf_get_hash(filename):
    """Return the content hash of an EXTF csv file, excluding its header line.

    The first line of an EXTF export contains metadata (e.g. a creation
    timestamp) that differs between otherwise identical exports, so it is
    skipped before hashing -- presumably to let two exports of the same
    period compare as equal.
    """
    with open(filename, "r", encoding="latin-1") as fh:
        fh.readline()  # discard the volatile EXTF header line
        body = fh.read()
    return calculate_sha256(body)
|
|
|
|
|
|
|
|
|
|
def extf_files_equal_content(file1, file2):
|
|
def extf_files_equal_content(file1, file2):
|
|
@@ -304,8 +313,7 @@ def calculate_sha256(data) -> str:
|
|
return hashlib.sha256(data.encode()).hexdigest()
|
|
return hashlib.sha256(data.encode()).hexdigest()
|
|
|
|
|
|
|
|
|
|
-if __name__ == "__main__":
|
|
|
|
- # export_all_periods()
|
|
|
|
|
|
+def test_content():
|
|
print(
|
|
print(
|
|
extf_files_equal_content(
|
|
extf_files_equal_content(
|
|
"datev/export/EXTF_Buchungsstapel_30612_10139_202312_20240514_112734.csv",
|
|
"datev/export/EXTF_Buchungsstapel_30612_10139_202312_20240514_112734.csv",
|
|
@@ -325,4 +333,38 @@ if __name__ == "__main__":
|
|
)
|
|
)
|
|
)
|
|
)
|
|
|
|
|
|
|
|
+
|
|
|
|
def archive_files():
    """Prune old exports and move fresh ones from export/temp into the archive.

    Two filesystem passes:
      1. Delete csv files in datev/export whose ctime is more than six days
         old. NOTE(review): ``st_ctime`` is creation time on Windows but
         inode-change time on Linux -- confirm the target platform.
      2. For each csv in datev/export/temp, derive its YYYYMM period from
         the filename and file it under Archiv/<YYYY>/<YYYYMM>. Files whose
         content hash matches an already archived file are discarded; new
         files are additionally copied to datev/export so the most recent
         version stays directly visible there.
    """
    cutoff_ts = (datetime.now() - timedelta(days=6)).timestamp()
    for old_csv in Path("datev/export").glob("*.csv"):
        if old_csv.stat().st_ctime < cutoff_ts:
            old_csv.unlink()

    archive_root = Path("datev/export/Archiv")
    for csv_file in Path("datev/export/temp").glob("*.csv"):
        match = re.search(r"_(\d{6})_", csv_file.name)
        if not match:
            continue  # filename carries no YYYYMM period marker
        yyyymm = match[1]
        period_dir = archive_root / yyyymm[:4] / yyyymm
        os.makedirs(period_dir, exist_ok=True)

        content_hash = extf_get_hash(csv_file)
        if has_identical_file(period_dir, content_hash):
            csv_file.unlink()  # identical export already archived
            continue
        # keep a working copy next to the archive, then move the original in
        shutil.copy(csv_file, archive_root.parent / csv_file.name)
        csv_file.rename(period_dir / csv_file.name)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
def has_identical_file(target: Path, file_hash: str) -> bool:
    """Return True if *target* already contains a csv whose content hash
    (header excluded, see ``extf_get_hash``) equals *file_hash*."""
    return any(
        extf_get_hash(candidate) == file_hash
        for candidate in Path(target).glob("*.csv")
    )
|
|
|
|
+
|
|
|
|
+
|
|
|
|
if __name__ == "__main__":
    # Full pipeline run: regenerate the EXTF csv exports for all periods,
    # then sweep them from export/temp into the dated archive structure.
    export_all_periods()
    archive_files()
|
|
# os.makedirs(Path(filename).parent.joinpath("info"), exist_ok=True)
|
|
# os.makedirs(Path(filename).parent.joinpath("info"), exist_ok=True)
|