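"""Collect system status on a Windows host: scheduled tasks, running
processes, database table sizes, and a file listing of the portal directory.
The collected log files are zipped and uploaded via FTP."""
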
import csv
import os
import re
import subprocess
import zipfile
from datetime import datetime
from pathlib import Path

import config
from status_client.db_info import DatabaseInfo
from status_client.ftp_client import FtpClient
from status_client.path_info import PathInfo


def run_command(cmd, logfile):
    """Run a shell command, capturing stdout and stderr into logfile."""
    with open(logfile, "wb") as stream:
        p = subprocess.Popen(cmd, stdout=stream, stderr=stream)
        p.wait()


def task_scheduler(logfile, portal_dir):
    """Dump all scheduled tasks, keeping only entries that reference portal_dir."""
    run_command("schtasks /query /v /fo CSV", logfile)
    logfile_filter = lambda line: portal_dir.lower() in line.lower()  # noqa: E731
    filter_logfile(logfile, logfile_filter)


def filter_logfile(logfile, logfile_filter):
    """Rewrite logfile in place, keeping the header line and every line
    accepted by logfile_filter."""
    logfile_temp = logfile + ".temp"
    Path(logfile_temp).unlink(missing_ok=True)
    Path(logfile).rename(logfile_temp)
    # The console tools write in the OEM codepage (cp850 here, presumably a
    # western European Windows locale); the filtered copy is re-encoded as latin-1.
    with open(logfile_temp, "r", encoding="cp850") as frh:
        with open(logfile, "w", encoding="latin-1", newline="") as fwh:
            fwh.write(frh.readline())  # header
            fwh.writelines(line for line in frh if logfile_filter(line))
    # os.remove(logfile_temp)


def task_manager(logfile):
    """Dump the process list, dropping noisy browser and system processes."""
    run_command("tasklist /fo CSV", logfile)
    logfile_filter = lambda line: not re.search(r"chrome|msedge|svchost|conhost", line, re.IGNORECASE)  # noqa: E731
    filter_logfile(logfile, logfile_filter)


def shared_files(logfile):
    """Dump the list of currently open shared files."""
    run_command("openfiles /Query /fo CSV", logfile)


def model_datasources(cfg, model_file: Path, logfile):
    """Run the Cognos macro that lists the data sources of a single model."""
    cmd = (
        f'"{cfg.cognos7.program_dir}\\runmac32.exe" "{cfg.tools_dir}\\VBS\\list-datasources-c11.mac" '
        f'"{model_file.parent}","{model_file.name}","{logfile}"'
    )
    p = subprocess.Popen(cmd)
    p.wait()


def datasources_all_models(cfg: config.Config):
    """List the data sources of every model (*.pyj) below the system directory."""
    os.makedirs(cfg.tasks_dir + "\\config\\models", exist_ok=True)
    for model_file in Path(cfg.system_dir + "\\Models").glob("*.pyj"):
        model_datasources(cfg, model_file, cfg.tasks_dir + "\\config\\models\\" + model_file.name + ".log")


def database_info(cfg: config.Config, csv_file):
    """Collect table sizes for all databases and write them to csv_file."""
    dbinfo = DatabaseInfo()
    result = []
    for db in dbinfo.databases():
        result.extend(dbinfo.table_size(db[0]))
    with open(csv_file, "w", encoding="latin-1", newline="") as fwh:
        wr = csv.writer(fwh, delimiter=";")
        wr.writerow(
            [
                "database_name",
                "schema_name",
                "table_name",
                "row_count",
                "total_space_kb",
                "last_changed",
            ]
        )
        wr.writerows(result)
    return result


def zip_to_file(base_dir, zip_file):
    """Zip selected config and log files below base_dir, skipping zip archives."""
    patterns = [
        "Tasks/config/*",
        "Tasks/config/info/*",
        "Tasks/config/models/*",
        "Tasks/logs/*",
        "Tasks/*.ini",
        "Tasks/*.bat",
    ]
    with zipfile.ZipFile(zip_file, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as archive:
        for pattern in patterns:
            for entry in Path(base_dir).glob(pattern):
                if entry.is_file() and ".zip" not in entry.name:
                    archive.write(entry, entry.relative_to(base_dir))


def system_check():
    cfg = config.Config()
    # Task scheduler: current state
    task_scheduler(cfg.log_dir + "\\schtasks.csv.log", cfg.portal_dir)
    # Running processes
    task_manager(cfg.log_dir + "\\tasklist.csv.log")
    # Current file-share sessions
    # shared_files(cfg.log_dir + "\\openfiles.csv.log")
    # Table sizes
    database_info(cfg, cfg.log_dir + "\\db_info.csv.log")
    # datasources_all_models(cfg)
    # List of all files in the GAPS directory
    # filename;size;cdate;mdate
    ti = PathInfo()
    ti.check_dir(cfg.portal_dir)
    ti.write_logfile(cfg.log_dir + "\\path_info.csv.log")
    # Log files from Tasks/logs and System/prot
    timestamp = datetime.now().strftime("%Y-%m-%d_%H%M%S")
    zip_file = f"{cfg.tasks_dir}\\logs\\zip\\{cfg.kunde_safe}_{timestamp}.zip"
    os.makedirs(Path(zip_file).parent, exist_ok=True)
    zip_to_file(cfg.portal_dir, zip_file)
    # Upload to FTP
    FtpClient().upload(zip_file)


if __name__ == "__main__":
    system_check()