Browse Source

cet auf sqlalchemy umgestellt (switched cet over to SQLAlchemy)

gc-server3 11 months ago
parent
commit
cfb66b6f25
2 changed files with 25 additions and 15 deletions
  1. 25 15
      database/cet.py
  2. BIN
      database/dist/cet.exe

+ 25 - 15
database/cet.py

@@ -1,5 +1,7 @@
 import plac
-import pyodbc
+import pandas as pd
+from sqlalchemy import create_engine
+import psycopg2
 from os import path
 
 
@@ -25,7 +27,8 @@ def run(
     Codepage="65001",
     errorlog="error.log",
 ):
-    dsn = f"dsn={Server};uid={User};pwd={Password}"
+    # dsn = f"dsn={Server};uid={User};pwd={Password}"
+    dsn = f"postgresql://{User}:{Password}@{Server}/{database}"
     if Codepage.isnumeric():
         Codepage = "cp" + Codepage
     if mode == "queryout":
@@ -40,6 +43,8 @@ def convert_data(element):
     txt = txt.replace("False", "0").replace("True", "1")
     txt = txt.replace("\t", "").replace("\r", "").replace("\n", "")
     txt = txt.replace("\x81", "").replace("\x90", "")
+
+    txt = "" if txt in ["nan", "NaT"] else txt
     return txt
 
 
@@ -49,21 +54,26 @@ def queryout(dsn, query, csv_file, codepage, errorlog):
             query = frh.read()
 
     try:
-        conn = pyodbc.connect(dsn)
-        cursor = conn.cursor()
-        cursor.execute(query)
-    except pyodbc.InterfaceError as e:
+        conn = create_engine(dsn).connect().execution_options(stream_results=True)
+        df = pd.read_sql(query, conn, chunksize=1000)
+    except Exception as e:
         print(e.args[1])
 
-    with open(csv_file, "w", encoding=codepage) as fwh:
-        while True:
-            row = cursor.fetchone()
-            if row is None:
-                break
-            try:
-                fwh.write(("\t".join(map(convert_data, row)) + "\n"))
-            except pyodbc.DataError as e:
-                print(e.args[1])
+    with open(csv_file, "w", encoding=codepage, errors="replace") as fwh:
+        print("Kopiervorgang wird gestartet...")
+        i = 0
+        for chunk in df:
+            chunk_dict = chunk.to_dict(orient="records")
+            for row in chunk_dict:
+                try:
+                    fwh.write(("\t".join(map(convert_data, row.values())) + "\n"))
+                except Exception as e:
+                    print(e.args[1])
+            i += len(chunk_dict)
+            if (len(chunk_dict)) == 1000:
+                print(f"1000 Zeilen zum SQL Server gesendet. Insgesamt gesendet: {i}")
+        print("")
+        print(f"{i} Zeilen kopiert.")
 
 
 if __name__ == "__main__":

BIN
database/dist/cet.exe