
Copy and clean up CSV files

gc-server3 · 3 months ago
commit 007f8f5e21
6 changed files with 126 additions and 83 deletions
  1. c11.py (+3 -3)
  2. c7.py (+6 -51)
  3. dist/gctools.exe (BIN)
  4. misc/csv_cleanup.py (+38 -27)
  5. misc/file_move.py (+69 -0)
  6. misc2.py (+10 -2)

c11.py (+3 -3)

@@ -34,10 +34,10 @@ def reportoutput(folder="", mailcsv=""):
     merge_group = exp.get_merge_group(req_plan)
     exp.template(req_plan, merge_group)
 
-    req_plan_filtered = exp.filter_request_plan(req_plan, cfg.xml_dir + "/" + mailcsv)
+    req_plan_filtered = exp.filter_request_plan(req_plan, f"{cfg.xml_dir}\\{mailcsv}")
     exp.execute_request_plan(req_plan_filtered)
-    if mailcsv != "":
-        pdf_test.missing_data(cfg.cognos11.reportoutput_dir + "/" + folder)
+    if mailcsv == "":
+        pdf_test.missing_data(f"{cfg.cognos11.reportoutput_dir}\\{folder}")
 
     pdf_merge.pdf_merge_files(merge_group)
     cognos11.get_converted_jobs(cfg)
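
The separator change is cosmetic on Windows, where forward and backward slashes address the same file; the substantive change is the flipped mailcsv condition, which now triggers the missing-data check only for unfiltered runs. A small self-contained check of the separator point (the directory and file names are hypothetical):

    from pathlib import PureWindowsPath

    # Forward- and backslash-separated paths resolve to the same Windows path.
    a = PureWindowsPath("C:/cognos/xml") / "mails.csv"
    b = PureWindowsPath("C:\\cognos\\xml\\mails.csv")
    print(a == b)   # True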

c7.py (+6 -51)

@@ -1,11 +1,11 @@
 import os
-from datetime import datetime
 from pathlib import Path
 
 import typer
 
 import cognos7
 import config
+from misc import file_move
 
 app = typer.Typer()
 cfg = config.Config()
@@ -30,61 +30,16 @@ def mdl_convert(mdl_file):
 
 @app.command()
 def move_csv():
-    max_age_ts = datetime.now().timestamp() - 24 * 60 * 60
     print("Verschiebe CSV-Dateien von IQD zu Export...")
-    no_files = True
-    for source in Path(f"{cfg.system_dir}\\IQD").rglob("*.csv"):
-        full_file = str(source)
-        if "_\\" in full_file or "_ori" in full_file or "_.csv" in full_file:
-            continue
-        no_files = False
-        print("* " + str(source))
-        dest = Path(f"{cfg.system_dir}\\Export\\{source.name}")
-        source_size = source.stat().st_size
-        source_ts = source.stat().st_mtime
-        if source_size <= 20:
-            print(f"!! Datei {source.name} ist leer !!")
-            continue
-        if source_ts < max_age_ts:
-            print(f"!! Datei {source.name} ist aelter als 24 Stunden !!")
-            continue
-        if dest.exists():
-            dest_size = dest.stat().st_size
-            if source_size < dest_size // 10:
-                print(f"!! Datei {source.name} ist zu klein !!")
-                print(f"{source}: {source_size // 1024} KB")
-                print(f"{dest}: {dest_size // 1024} KB")
-                print("")
-                continue
-            dest_ts = dest.stat().st_mtime
-            if source_ts < dest_ts:
-                print(f"!! Datei {source.name} ist aelter als die Zieldatei !!")
-                print(str(source) + ": " + datetime.fromtimestamp(source_ts).strftime("%d.%m.%Y, %H:%M:%S"))
-                print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
-                print("")
-                continue
-            dest.unlink()
-        os.rename(source, dest)
-    if no_files:
+    res = file_move.move(cfg.system_dir)
+
+    if res is False:
         print("* Keine CSV-Dateien im IQD-Ordner gefunden.\n")
 
     print("Pruefe Export-Ordner...")
-    ignore_file = Path(f"{cfg.system_dir}\\Export\\ignoriert.txt")
-    ignore_list = []
-    if ignore_file.exists():
-        ignore_list = ignore_file.read_text(encoding="latin-1").split("\n")
-    clean_exit = True
+    is_clean = file_move.check(cfg.system_dir)
 
-    for dest in Path(f"{cfg.system_dir}\\Export").glob("*.csv"):
-        if dest.name in ignore_list:
-            continue
-        dest_ts = dest.stat().st_mtime
-        if dest_ts < max_age_ts:
-            print(f"!! Datei {dest.name} ist aelter als 24 Stunden !!")
-            print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
-            clean_exit = False
-            continue
-    if clean_exit:
+    if is_clean:
         print("* Alle Dateien aktuell.\n")
 
 

dist/gctools.exe (BIN)


misc/csv_cleanup.py (+38 -27)

@@ -18,42 +18,53 @@ def csv_cleanup(dirname: str = "misc/data"):
 
 def csv_cleanup_file(csv_file: Path):
     temp_file = Path(str(csv_file) + ".tmp")
+    error_file = Path(str(csv_file) + ".err")
     file_mtime = csv_file.stat().st_mtime
     if file_mtime < MIN_AGE:
         return
     print(csv_file.name)
     with open(csv_file, "r", encoding="latin-1") as frh:
         with open(temp_file, "w", encoding="latin-1") as fwh:
-            header = frh.readline()
-            fwh.write(header)
-            sep_count = header.count(";")
+            with open(error_file, "w", encoding="latin-1") as ewh:
+                header = frh.readline()
+                fwh.write(header)
+                ewh.write(header)
+                sep_count = header.count(";")
 
-            buffer = ""
-            buffer_count = 0
+                buffer = ""
+                buffer_count = 0
+                solved_count = 0
+                error_count = 0
 
-            for line in frh.readlines():
-                line = line.replace("\t", "")
-                current_count = line.count(";")
-                if buffer_count + current_count == sep_count:
-                    if buffer == "":
-                        fwh.write(line)
-                        continue
-                    fwh.write(buffer + line)
-                    print("*", end="")
-                    buffer = ""
-                    buffer_count = 0
-                elif buffer_count + current_count < sep_count:
-                    buffer += re.subn(r"[\r\n]+", "", line)[0]
-                    buffer_count = buffer.count(";")
-                else:
-                    line2 = cleanup_line(buffer + line)
-                    if line2.count(";") == sep_count:
-                        fwh.write(line2)
+                for line in frh.readlines():
+                    line = line.replace("\t", "")
+                    current_count = line.count(";")
+                    if buffer_count + current_count == sep_count:
+                        if buffer == "":
+                            fwh.write(line)
+                            continue
+                        fwh.write(buffer + line)
+                        print("*", end="")
+                        solved_count += 1
+                        buffer = ""
+                        buffer_count = 0
+                    elif buffer_count + current_count < sep_count:
+                        buffer += re.subn(r"[\r\n]+", "", line)[0]
+                        buffer_count = buffer.count(";")
                     else:
-                        print(f"Error: {line2}")
-                    buffer = ""
-                    buffer_count = 0
-            print("")
+                        line2 = cleanup_line(buffer + line)
+                        if line2.count(";") == sep_count:
+                            fwh.write(line2)
+                        else:
+                            ewh.write(line2)
+                            print("-", end="")
+                            error_count += 1
+                        buffer = ""
+                        buffer_count = 0
+                if error_count + solved_count > 0:
+                    print("")
+                if error_count > 0:
+                    print(f"!! Anzahl Fehler: {error_count} !!")
 
     os.utime(temp_file, (file_mtime, file_mtime))
     csv_file.unlink()
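
Rows that still cannot be repaired now land in a .err sidecar next to the source file, and per-file counters drive the summary output. A small sketch of the repair path, assuming the module-level MIN_AGE (defined earlier in the module, not shown in this hunk) lets a freshly written file through; the sample path is hypothetical:

    from pathlib import Path

    from misc.csv_cleanup import csv_cleanup_file

    sample = Path("misc/data/beispiel.csv")
    sample.parent.mkdir(parents=True, exist_ok=True)
    # Header has two separators; the single record is split across two physical lines.
    sample.write_text("ID;Name;Ort\n1;Muster\nmann;Berlin\n", encoding="latin-1")

    csv_cleanup_file(sample)  # prints one "*" for the rejoined record; unrepaired
                              # rows would be written to beispiel.csv.err instead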

misc/file_move.py (+69 -0)

@@ -0,0 +1,69 @@
+import os
+from datetime import datetime
+from pathlib import Path
+
+
+def move(system_dir: str):
+    max_age_ts = datetime.now().timestamp() - 24 * 60 * 60
+    ignore_file = Path(f"{system_dir}\\IQD\\ignoriert.txt")
+    ignore_list = []
+    if ignore_file.exists():
+        ignore_list = ignore_file.read_text(encoding="latin-1").split("\n")
+
+    no_files = True
+    for source in Path(f"{system_dir}\\IQD").rglob("*.csv"):
+        if source.name in ignore_list:
+            continue
+
+        full_file = str(source)
+        if "_\\" in full_file or "_ori" in full_file or "_.csv" in full_file:
+            continue
+        no_files = False
+        print("* " + str(source))
+        dest = Path(f"{system_dir}\\Export\\{source.name}")
+        source_size = source.stat().st_size
+        source_ts = source.stat().st_mtime
+        if source_size <= 20:
+            print(f"!! Datei {source.name} ist leer !!")
+            continue
+        if source_ts < max_age_ts:
+            print(f"!! Datei {source.name} ist aelter als 24 Stunden !!")
+            continue
+        if dest.exists():
+            dest_size = dest.stat().st_size
+            if source_size < dest_size // 10:
+                print(f"!! Datei {source.name} ist zu klein !!")
+                print(f"{source}: {source_size // 1024} KB")
+                print(f"{dest}: {dest_size // 1024} KB")
+                print("")
+                continue
+            dest_ts = dest.stat().st_mtime
+            if source_ts < dest_ts:
+                print(f"!! Datei {source.name} ist aelter als die Zieldatei !!")
+                print(str(source) + ": " + datetime.fromtimestamp(source_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+                print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+                print("")
+                continue
+            dest.unlink()
+        os.rename(source, dest)
+    return not no_files
+
+
+def check(system_dir: str):
+    max_age_ts = datetime.now().timestamp() - 24 * 60 * 60
+    ignore_file = Path(f"{system_dir}\\Export\\ignoriert.txt")
+    ignore_list = []
+    if ignore_file.exists():
+        ignore_list = ignore_file.read_text(encoding="latin-1").split("\n")
+    clean_exit = True
+
+    for dest in Path(f"{system_dir}\\Export").glob("*.csv"):
+        if dest.name in ignore_list:
+            continue
+        dest_ts = dest.stat().st_mtime
+        if dest_ts < max_age_ts:
+            print(f"!! Datei {dest.name} ist aelter als 24 Stunden !!")
+            print(str(dest) + ": " + datetime.fromtimestamp(dest_ts).strftime("%d.%m.%Y, %H:%M:%S"))
+            clean_exit = False
+            continue
+    return clean_exit
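
The move/age-check logic now lives in its own module so it can be reused outside the Typer command. A minimal usage sketch; D:\Cognos is a hypothetical system directory containing the expected IQD and Export subfolders (plus an optional ignoriert.txt with one file name per line):

    from misc import file_move

    system_dir = "D:\\Cognos"            # hypothetical
    found = file_move.move(system_dir)   # False if no candidate CSV was found in IQD
    fresh = file_move.check(system_dir)  # True if every Export CSV is younger than 24 hours
    print(found, fresh)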

misc2.py (+10 -2)

@@ -2,6 +2,7 @@ import typer
 
 import config
 from misc import apache_ldap
+from misc.csv_cleanup import csv_cleanup
 from misc.headers import create_headerfiles
 
 app = typer.Typer()
@@ -10,13 +11,13 @@ cfg = config.Config()
 
 @app.command()
 def headers():
-    create_headerfiles(cfg.system_dir + "\\Export")
+    create_headerfiles(f"{cfg.system_dir}\\Export")
 
 
 @app.command()
 def ldap_backup():
     cred = cfg.cognos11.credentials
-    apache_ldap.ldap_backup(cred.username, cred.password, cfg.cognos11.config_dir + "\\apacheds_backup.ldif")
+    apache_ldap.ldap_backup(cred.username, cred.password, f"{cfg.cognos11.config_dir}\\apacheds_backup.ldif")
 
 
 @app.command()
@@ -39,5 +40,12 @@ def ldap_admin():
             break
 
 
+@app.command()
+def csv_check(path: str = ""):
+    if path == "":
+        path = f"{cfg.system_dir}\\export"
+    csv_cleanup(path)
+
+
 if __name__ == "__main__":
     headers()
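
The new csv-check command is a thin wrapper around misc.csv_cleanup.csv_cleanup that defaults to the Export folder under cfg.system_dir. A direct-call sketch with a hypothetical folder (the same default "misc/data" that csv_cleanup itself uses):

    from misc.csv_cleanup import csv_cleanup

    # Roughly what `csv-check --path misc/data` would do, without the config default.
    csv_cleanup("misc/data")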