Ver código fonte

Enhance report output functionality and improve log file handling

- Added flatten_chain function to simplify nested list processing.
- Updated reportoutput function to handle folder input and retry logic for report exports.
- Introduced reportoutput_by_date function for date-based report retrieval.
- Improved error handling in check_logfiles function for log file existence.
- Updated version to 1.1 and version date to 25.06.2025.
- Refactored missing_data function to manage missing PDF files more effectively.
gc-server3 5 meses atrás
pai
commit
b63f07e23d
6 arquivos alterados com 123 adições e 48 exclusões
  1. c11.py (+85 −5)
  2. cognos11/c11_export.py (+4 −21)
  3. database/bcp_log.py (+18 −12)
  4. dist/gctools.exe (BIN)
  5. gctools.py (+2 −2)
  6. pdf/pdf_test.py (+14 −8)

+ 85 - 5
c11.py

@@ -1,6 +1,9 @@
 import os
 import re
+import time
+from datetime import datetime
 from enum import Enum
+from itertools import chain
 from pathlib import Path
 
 import typer
@@ -11,6 +14,10 @@ import config
 from pdf import pdf_merge, pdf_test
 
 
+def flatten_chain(list_of_lists):
+    return list(chain.from_iterable(list_of_lists))
+
+
 class ExportFormat(Enum):
     PDF = "PDF"
     XML = "XML"
@@ -27,15 +34,35 @@ def export(folder="", overwrite="0"):
 
 
 @app.command()
-def reportoutput(folder="", mailcsv=""):
-    exp = cognos11.c11_export(cfg)
+def reportoutput(folder="", mailcsv="", exp=None):
+
+    if re.match(r"^[\d\.\-\/]+$", folder):
+        return reportoutput_by_date(folder, mailcsv)
+
+    if exp is None:
+        exp = cognos11.c11_export(cfg)
+
     # folder2 = exp.get_folder(folder)
     req_plan = exp.get_folder_pdf_request_plan(folder)
     merge_group = exp.get_merge_group(req_plan)
     exp.template(req_plan, merge_group)
 
     req_plan_filtered = exp.filter_request_plan(req_plan, f"{cfg.xml_dir}\\{mailcsv}")
-    exp.execute_request_plan(req_plan_filtered)
+    req_plan_flat = flatten_chain(req_plan_filtered)
+    print("Exportiere Reports...")
+    for i in range(1, 6):
+        print(f"Durchlauf {i}...")
+        req_plan_flat = exp.execute_request_plan(req_plan_flat)
+        if not req_plan_flat:
+            break
+        time.sleep(20)
+        exp.api.login()
+
+    if req_plan_flat:
+        print("!! Fehler: Einige Reports konnten nicht exportiert werden !!")
+        for report_req in req_plan_flat:
+            print(Path(report_req.filename).relative_to(cfg.cognos11.reportoutput_dir))
+
     if mailcsv == "":
         pdf_test.missing_data(f"{cfg.cognos11.reportoutput_dir}\\{folder}")
 
@@ -44,6 +71,59 @@ def reportoutput(folder="", mailcsv=""):
     # pdf_merge.merge_reports_in_folder(self.cfg, folder2)
 
 
+def reportoutput_by_date(date_str: str, mailcsv: str = "") -> None:
+    current_date = None
+    if re.match(r"^\d{4}-\d{2}-\d{2}$", date_str):
+        current_date = datetime.strptime(date_str, "%Y-%m-%d")
+    elif re.match(r"^\d{2}\.\d{2}\.\d{4}$", date_str):
+        current_date = datetime.strptime(date_str, "%d.%m.%Y")
+    elif re.match(r"^\d{2}\/\d{2}\/\d{4}$", date_str):
+        current_date = datetime.strptime(date_str, "%m/%d/%Y")
+
+    if current_date is None:
+        print(f"Ungültiges Datumsformat: {date_str}")
+        return
+
+    exp = cognos11.c11_export(cfg)
+    folders = [f["name"] for f in exp.api.get_folders()]
+    # for f in folders:
+    #     print(f)
+
+    weekdays = ["Mo", "Di", "Mi", "Do", "Fr", "Sa", "So"]
+    current_weekday = weekdays[current_date.weekday()]
+    print(f"Berichte für {current_weekday}, {current_date.strftime('%d.%m.%Y')}...")
+    # taeglich
+    daily_interval = ["Mo-Fr", "Mo-Sa", "Mo-So", "Di-So"]
+    daily_interval_special = {
+        "Mo": ["Mo-Fr", "Mo-Sa", "Mo-So"],
+        "Sa": ["Mo-Sa", "Di-So"],
+        "So": ["Mo-So", "Di-So"]
+    }
+    current_interval = daily_interval_special.get(current_weekday, daily_interval)
+    for c in current_interval:
+        search = f"Team Content/ReportOutput/taeglich/{c}"
+        for f in folders:
+            if search in f:
+                print(f)
+    # woechentlich
+    search = f"Team Content/ReportOutput/woechentlich/{current_weekday}"
+    for f in folders:
+        if search in f:
+            print(f)
+    # 14-taeglich
+    week = (int(current_date.strftime("%W")) % 2) + 1
+    search = f"Team Content/ReportOutput/14-taeglich/{current_weekday}{week}"
+    for f in folders:
+        if search in f:
+            print(f)
+    # monatlich
+    day_of_month = current_date.strftime("%d")
+    search = f"Team Content/ReportOutput/monatlich/{day_of_month}"
+    for f in folders:
+        if search in f:
+            print(f)
+
+
 @app.command()
 def merge(folder="", config=""):
     folder2 = cognos11.c11_export.get_folder(folder)
@@ -115,6 +195,6 @@ def version():
 
 
 if __name__ == "__main__":
-    # app()
+    app()
     # reportoutput()
-    version()
+    # reportoutput("2023-10-02")

+ 4 - 21
cognos11/c11_export.py

@@ -1,7 +1,6 @@
 import json
 import logging
 import os
-import time
 from collections import defaultdict
 from dataclasses import dataclass
 from datetime import datetime
@@ -180,29 +179,13 @@ class c11_export:
                 res.append([req for req in req_group if str(Path(req.filename).resolve()) in required_files])
         return res
 
-    def execute_request_plan(self, req_plan: list[list[ReportRequest]]):
+    def execute_request_plan(self, req_plan: list[ReportRequest]) -> list[ReportRequest]:
         failed_requests = []
-        for req_group in req_plan:
-            for report_req in req_group:
-                print(Path(report_req.filename).relative_to(self.cfg.cognos11.reportoutput_dir))
-                if not self.request_and_save_file(report_req):
-                    failed_requests.append(report_req)
-
-        if len(failed_requests) == 0:
-            return
-        time.sleep(20)  # wait for a while before retrying
-        failed_again = []
-        print("Zweiter Versuch bei folgenden Berichten:")
-        for report_req in failed_requests:
+        for report_req in req_plan:
             print(Path(report_req.filename).relative_to(self.cfg.cognos11.reportoutput_dir))
             if not self.request_and_save_file(report_req):
-                failed_again.append(report_req)
-
-        if len(failed_again) == 0:
-            return
-        print("Die folgenden Berichte konnten nicht erstellt werden:")
-        for report_req in failed_again:
-            print(Path(report_req.filename).relative_to(self.cfg.cognos11.reportoutput_dir))
+                failed_requests.append(report_req)
+        return failed_requests
 
     def request_and_save_file(self, report_request: ReportRequest, save=True):
         logging.debug(report_request.filename)

+ 18 - 12
database/bcp_log.py

@@ -50,22 +50,28 @@ def check_logfiles(prefix: str, base_dir: str) -> BulkcopyResult:
     ts = datetime.fromtimestamp(Path(f"{base_dir}/{prefix}.bcp2.log").stat().st_mtime)
     result = BulkcopyResult(file_name=prefix, timestamp=ts)
 
-    with open(f"{base_dir}/{prefix}.in.log", "r") as frh:
-        result.ignored = len(frh.readlines()) // 2
+    in_log = Path(f"{base_dir}\\{prefix}.in.log")
+    if in_log.exists():
+        with in_log.open("r") as frh:
+            result.ignored = len(frh.readlines()) // 2
 
     # info output of export
-    with open(f"{base_dir}/{prefix}.bcp1.log", "r", encoding="cp850", errors="ignore") as frh:
-        raw_logs = frh.read()
-        result.exported = rows_copied(raw_logs)
-        result.export_duration = total_time(raw_logs)
+    bcp1_log = Path(f"{base_dir}\\{prefix}.bcp1.log")
+    if bcp1_log.exists():
+        with bcp1_log.open("r", encoding="cp850", errors="ignore") as frh:
+            raw_logs = frh.read()
+            result.exported = rows_copied(raw_logs)
+            result.export_duration = total_time(raw_logs)
 
     # info output of import
-    with open(f"{base_dir}/{prefix}.bcp2.log", "r", encoding="cp850", errors="ignore") as frh:
-        raw_logs = frh.read()
-        result.imported = rows_copied(raw_logs)
-        result.import_duration = total_time(raw_logs)
-
-    csv_file = Path(f"{base_dir}/{prefix}.csv")
+    bcp2_log = Path(f"{base_dir}\\{prefix}.bcp2.log")
+    if bcp2_log.exists():
+        with bcp2_log.open("r", encoding="cp850", errors="ignore") as frh:
+            raw_logs = frh.read()
+            result.imported = rows_copied(raw_logs)
+            result.import_duration = total_time(raw_logs)
+
+    csv_file = Path(f"{base_dir}\\{prefix}.csv")
     if csv_file.exists():
         result.file_size = csv_file.stat().st_size
     return result

BIN
dist/gctools.exe


+ 2 - 2
gctools.py

@@ -8,8 +8,8 @@ import misc2
 import status
 import xls
 
-version = "1.0"
-version_date = "25.02.2025"
+version = "1.1"
+version_date = "25.06.2025"
 
 app = typer.Typer(
     help=(

+ 14 - 8
pdf/pdf_test.py

@@ -1,10 +1,12 @@
+import json
 import os
-import pdfplumber
-from pdfminer.pdfparser import PDFSyntaxError
 import re
-import json
-from pathlib import Path
 from datetime import datetime, timedelta
+from pathlib import Path
+
+import pdfplumber
+from pdfminer.pdfparser import PDFSyntaxError
+from pdfplumber.utils.exceptions import PdfminerException
 
 
 def current_date_test(base_dir: str):
@@ -37,10 +39,14 @@ def current_date_test(base_dir: str):
 
 def missing_data(base_dir: str):
     for f in Path(base_dir).iterdir():
-        if f.is_dir() and f.name not in [".", "..", "leer"]:
+        if f.is_dir() and f.name not in [".", "..", "leer", "_leer"]:
             missing_data(str(f))
     print(base_dir)
-    os.makedirs(base_dir + "/leer", exist_ok=True)
+    try:
+        Path(base_dir + "/leer").unlink(missing_ok=True)
+    except Exception:
+        pass
+    os.makedirs(base_dir + "/_leer", exist_ok=True)
 
     errors = []
     for f in Path(base_dir).glob("*.pdf"):
@@ -48,7 +54,7 @@ def missing_data(base_dir: str):
             with pdfplumber.open(str(f)) as pdf:
                 pages = len(pdf.pages)
                 text = pdf.pages[0].extract_text()
-        except PDFSyntaxError:
+        except (PDFSyntaxError, PdfminerException):
             pages = 0
             text = ""
 
@@ -61,7 +67,7 @@ def missing_data(base_dir: str):
             ]
         ):
             errors.append(f.name)
-            target = Path(base_dir + "/leer/" + f.name)
+            target = Path(base_dir + "/_leer/" + f.name)
             target.unlink(missing_ok=True)
             f.rename(target)