black formatter and requirements

gc-server3, 1 year ago
commit 0cbf9f081f

+ 5 - 1
requirements.txt

@@ -1,5 +1,6 @@
-flask>=2.2.0
 beautifulsoup4
+crontab
+flask>=2.2.0
 jinja2
 lxml
 numpy
@@ -10,8 +11,11 @@ plac
 pyinstaller
 pyodbc
 pypdf2
+pysftp
 pywin32
 requests
 requests_toolbelt
+schedule
 sqlalchemy
+uptime
 xlrd

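The requirements stay unpinned except for flask>=2.2.0; the commit adds crontab, pysftp, schedule, and uptime and re-sorts flask into alphabetical order. A quick way to confirm the new dependencies resolve after `pip install -r requirements.txt` — a minimal sketch, assuming (as happens to hold for these four) that each distribution exposes a module of the same name:

import importlib

# The four distributions added in this commit; each exposes a module
# under the same name, so a plain import check is enough here.
for module in ("crontab", "pysftp", "schedule", "uptime"):
    importlib.import_module(module)
    print(f"{module}: OK")
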
+ 14 - 15
status_client/ftp_client.py

@@ -14,7 +14,7 @@ class FtpConfig:
 
 
 class FtpClient:
-    commands = ['upload', 'download_dir']
+    commands = ["upload", "download_dir"]
 
     def __init__(self):
         self.base_dir = Path(__file__).parent
@@ -22,36 +22,35 @@ class FtpClient:
         # self.ftp_cfg = FtpConfig(**cfg)
         # self.ftp_cfg = FtpConfig("ftp.global-cube.com", "p33781016-vm", "Gcbs12ma-vm2020!!", "/")
         self.ftp_cfg = FtpConfig("ftp.global-cube.com", "u1339416173", "dihob#ratuy5kub%", "/")
-        warnings.filterwarnings('ignore')
+        warnings.filterwarnings("ignore")
 
-    @plac.pos('filename', '', type=str)
-    def upload(self, filename='CARLO.csv'):
+    @plac.pos("filename", "", type=str)
+    def upload(self, filename="CARLO.csv"):
         cnopts = pysftp.CnOpts()
         cnopts.hostkeys = None
 
-        with pysftp.Connection(self.ftp_cfg.server,
-                               username=self.ftp_cfg.username,
-                               password=self.ftp_cfg.password,
-                               cnopts=cnopts) as ftp_conn:
+        with pysftp.Connection(
+            self.ftp_cfg.server, username=self.ftp_cfg.username, password=self.ftp_cfg.password, cnopts=cnopts
+        ) as ftp_conn:
             ftp_conn.cwd(self.ftp_cfg.path)
             ftp_conn.put(filename)
             # print(res)
 
-    @plac.pos('path_from', '', type=str)
-    @plac.pos('path_to', '', type=str)
-    def download_dir(self, path_from='/server2019', path_to='./'):
+    @plac.pos("path_from", "", type=str)
+    @plac.pos("path_to", "", type=str)
+    def download_dir(self, path_from="/server2019", path_to="./"):
         cnopts = pysftp.CnOpts()
         cnopts.hostkeys = None
 
-        with pysftp.Connection(self.ftp_cfg.server,
-                               username=self.ftp_cfg.username,
-                               password=self.ftp_cfg.password, cnopts=cnopts) as ftp_conn:
+        with pysftp.Connection(
+            self.ftp_cfg.server, username=self.ftp_cfg.username, password=self.ftp_cfg.password, cnopts=cnopts
+        ) as ftp_conn:
             ftp_conn.cwd(path_from)
             files_list = ftp_conn.listdir()
             print(files_list)
             ftp_conn.get_d(path_from, path_to)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     # plac.Interpreter.call(ftp)
     FtpClient().download_dir()

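Beyond the quote style, note that both connection paths set `cnopts.hostkeys = None`, which disables SSH host key verification entirely — the `warnings.filterwarnings("ignore")` call suppresses the warning pysftp raises about exactly that. A sketch of the safer alternative, assuming a pinned known_hosts file (the file and its contents are not part of this commit):

import pysftp

# Verify the server against a pinned host key instead of accepting anything.
cnopts = pysftp.CnOpts(knownhosts="known_hosts")  # assumed file holding the server's key

with pysftp.Connection(
    "ftp.global-cube.com", username="u1339416173", password="...", cnopts=cnopts
) as ftp_conn:
    ftp_conn.cwd("/")
    ftp_conn.put("CARLO.csv")
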
+ 33 - 27
status_client/path_info.py

@@ -7,30 +7,30 @@ import psutil
 
 
 class PathInfo:
-    root_dir = ''
+    root_dir = ""
 
     ignore = []
     file_list = []
     opened_files = {}
 
     def ignore_list(self):
-        gitignore = self.root_dir + '\\.gitignore'
+        gitignore = self.root_dir + "\\.gitignore"
         if not path.exists(gitignore):
             pass
-        with open(gitignore, 'r') as f:
+        with open(gitignore, "r") as f:
             for line in f.readlines():
-                line = line.strip().replace('/', '\\').lower()
-                if line[:1] == '*':
-                    if line[-1] == '*':
+                line = line.strip().replace("/", "\\").lower()
+                if line[:1] == "*":
+                    if line[-1] == "*":
                         line = line[1:-1]
                     else:
-                        line = line[1:] + '\\'
+                        line = line[1:] + "\\"
                 else:
-                    line = '\\' + line + '\\'
+                    line = "\\" + line + "\\"
                 self.ignore.append(line)
 
     def ignored(self, filename):
-        rel_filename = '\\' + str(filename).replace(self.root_dir, '').lower() + '\\'
+        rel_filename = "\\" + str(filename).replace(self.root_dir, "").lower() + "\\"
 
         for e in self.ignore:
             if e in rel_filename:
@@ -38,12 +38,12 @@ class PathInfo:
         return False
 
     def check_dir(self, current_dir):
-        if self.root_dir == '':
+        if self.root_dir == "":
             self.root_dir = current_dir
             self.opened_files = self.process_handles()
             current_dir = Path(current_dir)
 
-        for entry in current_dir.glob('*'):
+        for entry in current_dir.glob("*"):
             if entry.is_dir():
                 self.check_dir(entry)
             elif not self.ignored(entry):
@@ -51,24 +51,30 @@ class PathInfo:
 
     def file_info(self, filename: Path):
         st = filename.stat()
-        readable = 'J' if os.access(filename, os.R_OK) else 'N'
-        writable = 'J' if os.access(filename, os.W_OK) else 'N'
-        handle = self.opened_files.get(str(filename), '')
-        blocked = 'J' if handle != '' else 'N'
-        file_info = [str(filename), str(st.st_size),
-                     datetime.fromtimestamp(st.st_ctime).isoformat(timespec='seconds'),
-                     datetime.fromtimestamp(st.st_mtime).isoformat(timespec='seconds'),
-                     readable, writable, blocked, handle]
+        readable = "J" if os.access(filename, os.R_OK) else "N"
+        writable = "J" if os.access(filename, os.W_OK) else "N"
+        handle = self.opened_files.get(str(filename), "")
+        blocked = "J" if handle != "" else "N"
+        file_info = [
+            str(filename),
+            str(st.st_size),
+            datetime.fromtimestamp(st.st_ctime).isoformat(timespec="seconds"),
+            datetime.fromtimestamp(st.st_mtime).isoformat(timespec="seconds"),
+            readable,
+            writable,
+            blocked,
+            handle,
+        ]
         return file_info
 
     def write_logfile(self, logfile):
-        with open(logfile, 'w') as fwh:
-            infos = [';'.join(line) for line in self.file_list]
-            fwh.write('name;size;ctime;mtime;read;write;blocked;process\n')
-            fwh.write('\n'.join(infos))
+        with open(logfile, "w") as fwh:
+            infos = [";".join(line) for line in self.file_list]
+            fwh.write("name;size;ctime;mtime;read;write;blocked;process\n")
+            fwh.write("\n".join(infos))
 
     def zip_to_file(self, zip_file):
-        with zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zip:
+        with zipfile.ZipFile(zip_file, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zip:
             for e in self.backup_list:
                 zip.write(e)
 
@@ -83,9 +89,9 @@ class PathInfo:
         return files
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     ti = PathInfo()
-    ti.check_dir('C:\\GlobalCube')
-    ti.write_logfile('C:\\GlobalCube\\Tasks\\logs\\path_info.csv')
+    ti.check_dir("C:\\GlobalCube")
+    ti.write_logfile("C:\\GlobalCube\\Tasks\\logs\\path_info.csv")
     # print(backup_list[:10])
     # ti.zip_to_file('C:\\GAPS_Autosys\\Test.zip')

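One thing black cannot fix: the untouched context at the top of `ignore_list` reads `if not path.exists(gitignore): pass`, which falls straight through to `open(gitignore)` and raises FileNotFoundError whenever no .gitignore exists. A sketch of the presumably intended guard — an assumption about intent, not something this commit changes:

from os import path

def ignore_list(self):
    gitignore = self.root_dir + "\\.gitignore"
    if not path.exists(gitignore):
        return  # no .gitignore: nothing to add to the ignore list
    with open(gitignore, "r") as f:
        ...  # line parsing exactly as in the diff above
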
+ 45 - 18
status_client/process_monitor.py

@@ -5,9 +5,26 @@ import time
 import os
 
 
+sort_by = "memory_usage"
+columns = [
+    "name",
+    "cpu_usage",
+    "memory_usage",
+    "read_bytes",
+    "write_bytes",
+    "status",
+    "create_time",
+    "nice",
+    "n_threads",
+    "cores",
+    "username",
+]
+name_filter = ["pwrplay.exe", "httpd.exe", "impadmin.exe", "trnsfrmr.exe"]
+
+
 def get_size(bytes):
     """Returns size of bytes in a nice format"""
-    for unit in ['', 'K', 'M', 'G', 'T', 'P']:
+    for unit in ["", "K", "M", "G", "T", "P"]:
         if bytes < 1024:
             return f"{bytes:.2f}{unit}B"
         bytes /= 1024
@@ -50,36 +67,42 @@ def get_processes_info(name_filter):
             except psutil.AccessDenied:
                 username = "N/A"
 
-        processes.append({
-            'pid': pid, 'name': name, 'create_time': create_time,
-            'cores': cores, 'cpu_usage': cpu_usage, 'status': status, 'nice': nice,
-            'memory_usage': memory_usage, 'read_bytes': read_bytes, 'write_bytes': write_bytes,
-            'n_threads': n_threads, 'username': username,
-        })
+        processes.append(
+            {
+                "pid": pid,
+                "name": name,
+                "create_time": create_time,
+                "cores": cores,
+                "cpu_usage": cpu_usage,
+                "status": status,
+                "nice": nice,
+                "memory_usage": memory_usage,
+                "read_bytes": read_bytes,
+                "write_bytes": write_bytes,
+                "n_threads": n_threads,
+                "username": username,
+            }
+        )
 
     return processes
 
 
 def construct_dataframe(processes):
     df = pd.DataFrame(processes)
-    df.set_index('pid', inplace=True)
+    df.set_index("pid", inplace=True)
     df.sort_values(sort_by, inplace=True)
     # pretty printing bytes
-    df['memory_usage'] = df['memory_usage'].apply(get_size)
-    df['write_bytes'] = df['write_bytes'].apply(get_size)
-    df['read_bytes'] = df['read_bytes'].apply(get_size)
+    df["memory_usage"] = df["memory_usage"].apply(get_size)
+    df["write_bytes"] = df["write_bytes"].apply(get_size)
+    df["read_bytes"] = df["read_bytes"].apply(get_size)
     # convert to proper date format
-    df['create_time'] = df['create_time'].apply(datetime.strftime, args=("%Y-%m-%d %H:%M:%S",))
+    df["create_time"] = df["create_time"].apply(datetime.strftime, args=("%Y-%m-%d %H:%M:%S",))
     # reorder and define used columns
     df = df[columns]
     return df
 
 
-if __name__ == "__main__":
-    sort_by = "memory_usage"
-    columns = ["name", "cpu_usage", "memory_usage", "read_bytes", "write_bytes",
-               "status", "create_time", "nice", "n_threads", "cores", "username"]
-    name_filter = ["pwrplay.exe", "httpd.exe", "impadmin.exe", "trnsfrmr.exe"]
+def main():
     while True:
         processes = get_processes_info(name_filter)
         if len(processes) > 0:
@@ -87,5 +110,9 @@ if __name__ == "__main__":
             os.system("cls") if "nt" in os.name else os.system("clear")
             print(df.to_string())
         else:
-            print('No active processes.')
+            print("No active processes.")
         time.sleep(10)
+
+
+if __name__ == "__main__":
+    main()

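The larger change here is structural: `sort_by`, `columns`, and `name_filter` move from the `__main__` block to module level, and the loop moves into `main()`. Previously `construct_dataframe` read `sort_by` and `columns` as globals that only existed when the file ran as a script, so importing the module and calling the function raised NameError; now both functions work from an import. For reference, `get_size` divides by 1024 until the value drops below 1024 — a few worked values, with the function copied from the diff so the snippet runs standalone:

def get_size(bytes):
    """Returns size of bytes in a nice format"""
    for unit in ["", "K", "M", "G", "T", "P"]:
        if bytes < 1024:
            return f"{bytes:.2f}{unit}B"
        bytes /= 1024

assert get_size(512) == "512.00B"
assert get_size(2048) == "2.00KB"
assert get_size(5 * 1024**3) == "5.00GB"
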
+ 11 - 8
status_client/request.py

@@ -1,5 +1,4 @@
 import requests
-# import json
 import time
 import uptime
 from configparser import ConfigParser
@@ -10,23 +9,27 @@ tasks_dir = "C:\\GAPS\\Portal\\Tasks"
 def ping():
     current_uptime = int(uptime.boottime().timestamp())
     kunde = findName()
-    result = requests.get(f'http://localhost:5000/ping/{kunde}?uptime={current_uptime}')
+    result = requests.get(f"http://localhost:5000/ping/{kunde}?uptime={current_uptime}")
     print(result.content)
     # print(json.dumps(json.loads(result.content), indent=2))
 
 
 def findName():
     config = ConfigParser()
-    with open(tasks_dir + '\\GAPS.ini', 'r') as stream:
-        config.read_string('[default]\n' + stream.read())
-        return config['default']['KUNDE'][1:-1]
+    with open(tasks_dir + "\\GAPS.ini", "r") as stream:
+        config.read_string("[default]\n" + stream.read())
+        return config["default"]["KUNDE"][1:-1]
 
 
-while True:
-    ping()
-    time.sleep(60 * 60)
+def main():
+    while True:
+        ping()
+        time.sleep(60 * 60)
 
 
+if __name__ == "__main__":
+    main()
+
 # from crontab import CronTab
 
 # def ping():

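The same import-safety refactor as in process_monitor.py: the bare `while True` loop at module level meant any import of request.py blocked forever; it now lives in `main()` behind the usual guard. The hourly loop could also be expressed with the newly added `schedule` dependency — a sketch, not what the commit ships:

import time
import schedule

schedule.every(1).hours.do(ping)  # ping as defined in request.py above

while True:
    schedule.run_pending()
    time.sleep(60)  # coarse tick; schedule decides when ping() is actually due
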
+ 4 - 3
status_client/scheduler.py

@@ -1,10 +1,11 @@
 from crontab import CronTab
-import requests
+
+# import requests
 
 
 def ping():
-    cron = CronTab(tabfile='filename.tab')
-    job = cron.new(command='python C:\\Projekte\\Python\\request.py')
+    cron = CronTab(tabfile="filename.tab")
+    job = cron.new(command="python C:\\Projekte\\Python\\request.py")
     job.minute.every(1)
     cron.write()
     cron.run_scheduler()

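One caveat worth flagging: the `CronTab(tabfile=...)` / `cron.new(...)` / `run_scheduler()` API used here looks like the python-crontab distribution, while the entry added to requirements.txt is `crontab` — a different PyPI package that exposes the same `crontab` module name but a cron-expression parser instead. If that guess is right, installing from this requirements file would break scheduler.py at the constructor; a quick runtime check (a sketch):

import crontab

# python-crontab's CronTab has new()/write()/run_scheduler();
# the `crontab` package's CronTab does not.
print(hasattr(crontab.CronTab, "run_scheduler"))
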
+ 3 - 3
status_client/scheduler2.py

@@ -26,8 +26,8 @@ class Tasks:
         print("I'm working...")
 
 
-logging.basicConfig(filename='logs/scheduler.log', level=logging.INFO)
-schedule_logger = logging.getLogger('schedule')
+logging.basicConfig(filename="logs/scheduler.log", level=logging.INFO)
+schedule_logger = logging.getLogger("schedule")
 
 
 def main():
@@ -41,5 +41,5 @@ def main():
         time.sleep(n)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

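The visible context (`time.sleep(n)` in `main()`) suggests scheduler2.py follows the standard `schedule` idiom: sleep until the next job is due, then run pending jobs. A minimal self-contained version of that pattern — a sketch reconstructing what the partially shown `main()` presumably does, not a copy of it:

import time
import schedule

def work():
    print("I'm working...")

schedule.every(10).seconds.do(work)

while True:
    n = schedule.idle_seconds()  # seconds until the next job, or None if none scheduled
    if n is None:
        break
    if n > 0:
        time.sleep(n)
    schedule.run_pending()
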
+ 35 - 22
status_client/status_client.py

@@ -11,34 +11,36 @@ from db_info import DatabaseInfo
 
 
 def run_command(cmd, logfile):
-    with open(logfile, 'wb') as stream:
+    with open(logfile, "wb") as stream:
         p = subprocess.Popen(cmd, stdout=stream, stderr=stream)
         p.wait()
 
 
 def task_scheduler(logfile):
-    run_command('schtasks /query /v /fo CSV', logfile)
+    run_command("schtasks /query /v /fo CSV", logfile)
 
 
 def task_manager(logfile):
-    run_command('tasklist /fo CSV', logfile)
+    run_command("tasklist /fo CSV", logfile)
 
 
 def shared_files(logfile):
-    run_command('openfiles /Query /fo CSV', logfile)
+    run_command("openfiles /Query /fo CSV", logfile)
 
 
 def model_datasources(cfg, model_file: Path, logfile):
-    cmd = f'"{cfg.cognos7.program_dir}\\runmac32.exe" "{cfg.tools_dir}\\VBS\\list-datasources-c11.mac" ' \
-          + f'"{model_file.parent}","{model_file.name}","{logfile}"'
+    cmd = (
+        f'"{cfg.cognos7.program_dir}\\runmac32.exe" "{cfg.tools_dir}\\VBS\\list-datasources-c11.mac" '
+        + f'"{model_file.parent}","{model_file.name}","{logfile}"'
+    )
     p = subprocess.Popen(cmd)
     p.wait()
 
 
 def datasources_all_models(cfg: config.Config):
-    os.makedirs(cfg.tasks_dir + '\\config\\models', exist_ok=True)
-    for model_file in Path(cfg.system_dir + '\\Models').glob('*.pyj'):
-        model_datasources(cfg, model_file, cfg.tasks_dir + '\\config\\models\\' + model_file.name + '.log')
+    os.makedirs(cfg.tasks_dir + "\\config\\models", exist_ok=True)
+    for model_file in Path(cfg.system_dir + "\\Models").glob("*.pyj"):
+        model_datasources(cfg, model_file, cfg.tasks_dir + "\\config\\models\\" + model_file.name + ".log")
 
 
 def database_info(cfg: config.Config):
@@ -46,34 +48,45 @@ def database_info(cfg: config.Config):
     result = []
     for db in dbinfo.databases():
         result.extend(dbinfo.table_size(db[0]))
-    csv_file = cfg.tasks_dir + '\\logs\\db_info.csv'
-    with open(csv_file, 'w', encoding='latin-1', newline='') as fwh:
-        wr = csv.writer(fwh, delimiter=';')
-        wr.writerow(['DatabaseName', 'SchemaName', 'TableName', 'RowCounts', 'TotalSpaceKB', 'UsedSpaceKB', 'UnusedSpaceKB', 'LastChanged'])
+    csv_file = cfg.tasks_dir + "\\logs\\db_info.csv"
+    with open(csv_file, "w", encoding="latin-1", newline="") as fwh:
+        wr = csv.writer(fwh, delimiter=";")
+        wr.writerow(
+            [
+                "DatabaseName",
+                "SchemaName",
+                "TableName",
+                "RowCounts",
+                "TotalSpaceKB",
+                "UsedSpaceKB",
+                "UnusedSpaceKB",
+                "LastChanged",
+            ]
+        )
         for row in result:
             wr.writerow(row)
     return result
 
 
 def zip_to_file(base_dir, zip_file):
-    filter = ['config/*', 'config/info/*', 'config/models/*', 'logs/*', '*.ini', '*.bat']
+    filter = ["config/*", "config/info/*", "config/models/*", "logs/*", "*.ini", "*.bat"]
 
-    with zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zip:
+    with zipfile.ZipFile(zip_file, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zip:
         for f in filter:
             for e in Path(base_dir).glob(f):
-                if '.zip' not in e.name:
+                if ".zip" not in e.name:
                     zip.write(e, e.relative_to(base_dir))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     cfg = config.Config()
 
     # Aufgabenplanung aktueller Stand
-    task_scheduler(cfg.tasks_dir + '\\logs\\schtasks.csv')
+    task_scheduler(cfg.tasks_dir + "\\logs\\schtasks.csv")
     # Laufende Prozesse
-    task_manager(cfg.tasks_dir + '\\logs\\tasklist.csv')
+    task_manager(cfg.tasks_dir + "\\logs\\tasklist.csv")
     # aktuelle Freigabe-Sessions
-    shared_files(cfg.tasks_dir + '\\logs\\openfiles.csv')
+    shared_files(cfg.tasks_dir + "\\logs\\openfiles.csv")
     # Tabellengrößen
     database_info(cfg)
 
@@ -82,10 +95,10 @@ if __name__ == '__main__':
     # filename;size;cdate;mdate
     ti = PathInfo()
     ti.check_dir(cfg.portal_dir)
-    ti.write_logfile(cfg.tasks_dir + '\\logs\\path_info.csv')
+    ti.write_logfile(cfg.tasks_dir + "\\logs\\path_info.csv")
 
     # Logdateien aus Tasks/logs und System/prot
-    timestamp = datetime.now().strftime('%Y-%m-%d_%H%M%S')
+    timestamp = datetime.now().strftime("%Y-%m-%d_%H%M%S")
     zip_file = f"{cfg.tasks_dir}\\logs\\{cfg.kunde_safe}_{timestamp}.zip"
     zip_to_file(cfg.tasks_dir, zip_file)
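
A small portability note on `run_command`: it hands subprocess.Popen a single command string, which Windows tolerates for plain executables but POSIX does not; the argument-list form is the portable spelling and avoids quoting surprises. (Incidentally, `zip_to_file` shadows the built-ins `filter` and `zip` — harmless here, but easy to trip over.) A sketch of the list form, not part of the commit:

import subprocess

def run_command(cmd, logfile):
    # cmd as a list: no shell parsing, behaves the same on every platform
    with open(logfile, "wb") as stream:
        subprocess.run(cmd, stdout=stream, stderr=stream, check=False)

run_command(["schtasks", "/query", "/v", "/fo", "CSV"], "schtasks.csv")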