@@ -1,9 +1,11 @@
 import os
+from datetime import datetime
 from pathlib import Path
-import config
-import cognos7
+
 import typer

+import cognos7
+import config

 app = typer.Typer()
 cfg = config.Config()
@@ -12,19 +14,80 @@ cfg = config.Config()
 @app.command()
 def iqd_convert():
     iqdconv = cognos7.IqdConverter()
-    iqdconv.output_dir = cfg.system_dir + "\\SQL\\schema\\" + cfg.system + "\\views_imr"
-    iqdconv.run_folder(cfg.system_dir + "\\IQD")
+    iqdconv.output_dir = f"{cfg.system_dir}\\SQL\\schema\\{cfg.system}\\views_imr"
+    iqdconv.run_folder(f"{cfg.system_dir}\\IQD")


 @app.command()
 def mdl_convert(mdl_file):
     cognos7.convert_file(mdl_file)
-    source = mdl_file[:-4] + ".json"
-    target = cfg.cognos11.specs_dir + "/../DataModel/" + Path(source).name
-    os.makedirs(os.path.dirname(target), exist_ok=True)
-    Path(target).unlink(missing_ok=True)
-    os.rename(source, target)
+    source = Path(mdl_file[:-4] + ".json")
+    dest = f"{cfg.cognos11.specs_dir}\\..\\DataModel\\{source.name}"
+    os.makedirs(os.path.dirname(dest), exist_ok=True)
+    Path(dest).unlink(missing_ok=True)
+    os.rename(source, dest)
+
+
+@app.command()
+def move_csv():
+    max_age_ts = datetime.now().timestamp() - 24 * 60 * 60
+    print("Verschiebe CSV-Dateien von IQD zu Export...")
+    no_files = True
+    for source in Path(f"{cfg.system_dir}\\IQD").rglob("*.csv"):
+        full_file = str(source)
+        if "_\\" in full_file or "_ori" in full_file or "_.csv" in full_file:
+            continue
+        no_files = False
+        print("* " + str(source))
+        dest = Path(f"{cfg.system_dir}\\Export\\{source.name}")
+        source_size = source.stat().st_size
+        source_ts = source.stat().st_mtime
+        if source_size <= 20:
+            print(f"!! Datei {source.name} ist leer !!")
+            continue
+        if source_ts < max_age_ts:
+            print(f"!! Datei {source.name} ist aelter als 24 Stunden !!")
+            continue
+        if dest.exists():
+            dest_size = dest.stat().st_size
+            if source_size < dest_size // 10:
+                print(f"!! Datei {source.name} ist zu klein !!")
+                print(f"{source}: {source_size // 1024} KB")
+                print(f"{dest}: {dest_size // 1024} KB")
+                print("")
+                continue
+            dest_ts = dest.stat().st_mtime
+            if source_ts < dest_ts:
+                print(f"!! Datei {source.name} ist aelter als die Zieldatei !!")
+                print(str(source) + ": " + datetime.fromtimestamp(source_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+                print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+                print("")
+                continue
+            dest.unlink()
+        os.rename(source, dest)
+    if no_files:
+        print("* Keine CSV-Dateien im IQD-Ordner gefunden.\n")
+
+    print("Pruefe Export-Ordner...")
+    ignore_file = Path(f"{cfg.system_dir}\\Export\\ignoriert.txt")
+    ignore_list = []
+    if ignore_file.exists():
+        ignore_list = ignore_file.read_text(encoding="latin-1").split("\n")
+    clean_exit = True
+
+    for dest in Path(f"{cfg.system_dir}\\Export").glob("*.csv"):
+        if dest.name in ignore_list:
+            continue
+        dest_ts = dest.stat().st_mtime
+        if dest_ts < max_age_ts:
+            print(f"!! Datei {dest.name} ist aelter als 24 Stunden !!")
+            print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+            clean_exit = False
+            continue
+    if clean_exit:
+        print("* Alle Dateien aktuell.\n")


 if __name__ == "__main__":
-    app()
+    # app()
+    move_csv()
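
Note: a minimal sketch of how the new move_csv command could be exercised through Typer's test runner. The module name cognos_cli is an assumption, not part of this diff, and the sketch presumes the Typer app object stays importable as shown above.

    # Sketch only: the module name "cognos_cli" is an assumption, not part of this diff.
    from typer.testing import CliRunner

    from cognos_cli import app

    runner = CliRunner()
    # Depending on the Typer version, the subcommand may be exposed as
    # "move_csv" or "move-csv"; adjust the name if needed.
    result = runner.invoke(app, ["move-csv"])
    print(result.output)

From the shell, with app() restored as the entry point in the __main__ block, the equivalent call would be python cognos_cli.py move-csv, with the same caveat about the command name.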