@@ -1,9 +1,12 @@
+import hashlib
 import logging
+import os
+import re
+import shutil
 import pandas as pd
 import json
 from pathlib import Path
-from datetime import date
-from dateutil.relativedelta import relativedelta
+from datetime import datetime, timedelta
 from sqlalchemy import create_engine
 from suds.client import Client
 from cryptography.fernet import Fernet
@@ -28,17 +31,13 @@ def get_config():
     return config
 
 
-def conn_string(dsn):
+def conn_string(dsn: dict[str, str]):
     return f"mssql+pyodbc://{dsn['user']}:{dsn['pass']}@{dsn['server']}/{dsn['database']}?driver=SQL+Server+Native+Client+11.0"
 
 
-def load_data(config, source, year=None, month=None):
-    if year is None:
-        curr_date = date.today() - relativedelta(months=+1)
-        year = curr_date.strftime("%Y")
-        month = curr_date.strftime("%m")
-    period = f"{year}-{month}"
-    period2 = int(f"{year}{month}")
+def load_data(config: dict, source: str, period: str):
+    year = period[:4]
+    month = period[4:6]
 
     select_befehl_auftraege = f"SELECT * FROM [Auftraege_NASA_gruppiert] WHERE Periode = '{period}'"
     select_befehl_mitarbeiter = f"SELECT * FROM [Mitarbeiter_NASA] WHERE Periode = '{period}'"
@@ -61,12 +60,16 @@ def load_data(config, source, year=None, month=None):
     if source == "database":
         source_db = create_engine(conn_string(config["source_dsn"]))
         df = pd.read_sql(select_befehl_auftraege, con=source_db)
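+        # Rename the database columns: AuftragsArt -> AuftragsArtId_Name, AuftragsTyp -> AuftragsArt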
+        rename_from = ["AuftragsArt", "AuftragsTyp"]
+        rename_to = ["AuftragsArtId_Name", "AuftragsArt"]
+        df = df.rename(columns=dict(zip(rename_from, rename_to)))
+
     else:
         df = pd.read_csv(source_auftraege, sep=";", encoding="latin-1", decimal=",")
-        df = df[df["Periode"] == period2]
+        df = df[df["Periode"] == int(period)]
 
-    # auftragsart = ["Extern", "Garantie", "Intern", "Theke"]
-    # auftragstyp = ["Inspektion", "Karosseriearbeit", "Lackierung", "Verschleißteile", "Sonstiges"]
+    # AuftragsArt = ["Inspektion", "Karosseriearbeit", "Lackierung", "Verschleißteile", "Sonstiges"]
+    # AuftragsArtId = {"1": "Extern", "2": "Garantie", "3": "Intern", "4": "Theke"}
 
     columns = [
         "AuftragsArt",
@@ -81,14 +84,14 @@ def load_data(config, source, year=None, month=None):
     df = df[columns]
 
     df.to_csv(
-        f"export/{period}_auftraege.csv",
+        f"{config['export_dir']}/csv/{period}_auftraege.csv",
         sep=";",
         encoding="latin-1",
         decimal=",",
         index=False,
     )
 
-    payload["WerkstattDurchlaeufe"] = df["AnzahlAuftraege"].sum()
+    payload["WerkstattDurchlaeufe"] = int(df["AnzahlAuftraege"].sum())
     payload["AfterSalesPositionen"] = df.to_dict("records")
 
     # Mitarbeiter gesamt und produktiv
@@ -98,7 +101,7 @@ def load_data(config, source, year=None, month=None):
         df = pd.read_csv(source_mitarbeiter, sep=";", encoding="latin-1", decimal=",")
 
     df.to_csv(
-        f"export/{period}_mitarbeiter.csv",
+        f"{config['export_dir']}/csv/{period}_mitarbeiter.csv",
         sep=";",
         encoding="latin-1",
         decimal=",",
@@ -106,11 +109,11 @@ def load_data(config, source, year=None, month=None):
     )
 
     payload["AnzahlMitarbeiter"] = df.shape[0]
-    payload["AnzahlProduktiv"] = df["Prod"].sum()
+    payload["AnzahlProduktiv"] = int(df["produktiv"].sum())
     return payload
 
 
-def submit_data(config, payload):
+def submit_data(config: dict, payload):
     client = Client(
         url=config["service_url"],
         username=config["credentials"]["username"],
@@ -124,7 +127,7 @@ def submit_data(config, payload):
         return -1
 
 
-def print_result(period, result, len_pos):
+def print_result(period: str, result: int, len_pos: int):
     print("Periode: " + period)
     if len_pos == result:
         print(f"Erfolgreich {result} Datensätze übertragen")
@@ -137,20 +140,85 @@ def print_result(period, result, len_pos):
         print(f"{len_pos - result} von {len_pos} Datensätzen nicht verarbeitet!")
 
 
-def workflow(config, year, month):
-    period = f"{year}-{month}"
-    payload = load_data(config, "csv", year, month)
+def workflow(config: dict, year, month):
+    period = f"{year}{month}"
+    payload = load_data(config, "csv", period)
     result = submit_data(config, payload)
     len_pos = len(payload["AfterSalesPositionen"])
     print_result(period, result, len_pos)
 
 
+def export_all_periods(config) -> None:
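+    # Export the payload for each period from the database as a JSON file in the temp export folder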
+    dt = datetime.now()
+    prev = str(dt.year - 1)
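+    # All twelve months of the previous year plus the already completed months of the current year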
+    periods = [f"{prev}{x:02}" for x in range(1, 13)] + [f"{dt.year}{x:02}" for x in range(1, dt.month)]
+
+    for period in periods:
+        payload = load_data(config, "database", period)
+        with open(f"{config['export_dir']}/temp/NASA_{config['client_id']}_{period}_{config['timestamp']}.json", "w") as fh:
+            json.dump(payload, fh, indent=2)
+
+
+def file_get_hash(filename: str) -> str:
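+    # SHA-256 hex digest of the file's text content, used to detect unchanged exports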
+    with open(filename, "r") as frh:
+        data = frh.read()
+    return calculate_sha256(data)
+
+
+def calculate_sha256(data: str) -> str:
+    return hashlib.sha256(data.encode()).hexdigest()
+
+
+def archive_files(export_dir: str):
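+    # Move fresh exports from temp/ into Archiv/<year>/<period>; exports whose content is already archived are dropped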
+    last_week = (datetime.now() - timedelta(days=6)).timestamp()
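+    # JSON files in the export root older than that are removed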
+    for file in Path(export_dir).glob("*.json"):
+        if file.stat().st_ctime < last_week:
+            file.unlink()
+
+    archive_path = Path(export_dir + "/Archiv")
+    for file in Path(export_dir + "/temp").glob("*.json"):
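+        # File names look like NASA_<client_id>_<YYYYMM>_<timestamp>.json; extract the period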
+        p = re.search(r"NASA_\d{5}_(20\d{4})_", file.name)
+        if not p:
+            continue
+        period = p[1]
+        year = period[:4]
+        dest_folder = archive_path / year / period
+        os.makedirs(dest_folder, exist_ok=True)
+        file_hash = file_get_hash(file)
+
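+        # Content already archived for this period: discard the temp file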
+        if has_identical_file(dest_folder, file_hash):
+            file.unlink()
+            continue
+        shutil.copy(file, archive_path.parent / file.name)
+        file.rename(dest_folder / file.name)
+
+
+def has_identical_file(target: Path, file_hash: str) -> bool:
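+    # True if any archived JSON in the target folder has the same content hash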
+    for archived_file in Path(target).glob("*.json"):
+        if file_get_hash(archived_file) == file_hash:
+            return True
+    return False
+
+
+def submit_changes(config):
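+    # Submit the payloads that archive_files copied into the export root (new or changed periods only)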
+    for file in Path(config["export_dir"]).glob("NASA_*.json"):
+        with file.open("r") as fh:
+            payload = json.load(fh)
+        period = payload["Jahr"] + payload["Monat"]
+        len_pos = len(payload["AfterSalesPositionen"])
+        result = submit_data(config, payload)
+        print_result(period, result, len_pos)
+
+
 def main():
     config = get_config()
-    year = "2023"
-    for p in range(1, 13):
-        month = f"{p:02}"
-        workflow(config, year, month)
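+    # Full run: export all periods, archive the new results, then submit the changes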
+    config["timestamp"] = datetime.now().strftime("%Y%m%d_%H%M%S")
+    config["export_dir"] = str(Path(".").resolve() / "export" / "NASA")
+    export_all_periods(config)
+    archive_files(config["export_dir"])
+    submit_changes(config)
 
 
 if __name__ == "__main__":