
- Adapted DB create to MySQL
- Renamed Powerplay export

Robert Bedner, 4 years ago
commit 435a18c5db
8 changed files with 80 additions and 65 deletions
  1. database.py (+13, -0)
  2. db_copy.py (+32, -0)
  3. db_create.py (+33, -37)
  4. gctools/powerplay-export.mac (+0, -0)
  5. gctools/powerplay-export.mcx (+0, -0)
  6. gctools/powerplay.py (+2, -2)
  7. powerplay.py (+0, -4)
  8. powerplay.vbs (+0, -22)

+ 13 - 0
database.py

@@ -0,0 +1,13 @@
+def conn_string(dsn):
+    if dsn['driver'] == 'mssql':
+        return f"mssql+pyodbc://{dsn['user']}:{dsn['pass']}@{dsn['server']}/{dsn['database']}?driver=SQL+Server+Native+Client+11.0"
+    if dsn['driver'] == 'mysql':
+        return f"mysql+pymysql://{dsn['user']}:{dsn['pass']}@{dsn['server']}/{dsn['database']}?charset=utf8mb4"
+
+
+def bcp_conn_params(dsn):
+    return f"-S {dsn['server']} -d {dsn['database']} -U {dsn['user']} -P {dsn['pass']}"
+
+
+def mysql_conn_params(dsn):
+    return f"-h {dsn['server']} -D {dsn['database']} -u {dsn['user']} -p{dsn['pass']}"  # no space after -p: the mysql client only reads the password if it follows immediately

+ 32 - 0
db_copy.py

@@ -0,0 +1,32 @@
+import sqlalchemy.dialects.mssql.base
+from sqlalchemy import create_engine, MetaData, Boolean, DATETIME
+from database import conn_string
+
+source_dsn = {'user': 'sa', 'pass': 'Mffu3011#', 'server': 'GC-SERVER1\\GLOBALCUBE', 'database': 'OPTIMA', 'driver': 'mssql'}
+source_schema = 'import'
+
+target_dsn = {'user': 'root', 'pass': '', 'server': 'localhost', 'database': 'OPTIMA', 'driver': 'mysql'}
+target_schema = ''
+
+source_db = create_engine(conn_string(source_dsn))
+source_meta = MetaData()
+source_meta.reflect(bind=source_db, schema=source_schema)
+# print('\n'.join(source_meta.tables.keys()))
+target_db = create_engine(conn_string(target_dsn))
+source_meta.bind = target_db
+
+for table in source_meta.sorted_tables:
+    table.schema = None  # MySQL target has no separate schema
+
+    for col in table.c:
+        if getattr(col.type, 'collation', None) is not None:
+            col.type.collation = None  # drop MSSQL collations
+        if type(col.type) is sqlalchemy.dialects.mssql.base.BIT:
+            col.type = Boolean()  # swap the MSSQL-specific BIT for a portable Boolean
+        if type(col.type) is DATETIME:
+            # print(table.name + '.' + col.name)
+            col.server_default = None  # reflected defaults would not be valid on MySQL
+        if type(col.type) is sqlalchemy.dialects.mssql.base.SMALLDATETIME:
+            col.type = DATETIME()  # SMALLDATETIME does not exist outside MSSQL
+
+source_meta.create_all()
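
Side note: assigning source_meta.bind = target_db relies on bound MetaData, which is deprecated in SQLAlchemy 1.4 and removed in 2.0. On newer versions the final step would pass the engine explicitly, roughly:

    # SQLAlchemy 1.4+/2.0 style: no bound MetaData needed
    source_meta.create_all(bind=target_db)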

+ 33 - 37
db_create.py

@@ -1,48 +1,41 @@
 import pandas as pd
 from sqlalchemy import create_engine, inspect
+from database import bcp_conn_params, conn_string
 
-
-csv_file = 'CARLO.csv'
-clients = {'1': 'M und S Fahrzeughandel GmbH'}
-client_db = '1'
-date_filter = "'2018-01-01'"
-source_dsn = {'user': 'sa', 'pass': 'Mffu3011#', 'server': 'GC-SERVER1\\GLOBALCUBE', 'database': 'DE0017'}
-source_schema = 'dbo'
-target_dsn = {'user': 'sa', 'pass': 'Mffu3011#', 'server': 'GC-SERVER1\\GLOBALCUBE', 'database': 'CARLO2'}
-target_schema = 'import'
-# stage_dir = "\\\\gc-server1\Austausch\\stage"
-stage_dir = 'C:\\GlobalCube\\System\\CARLO\\Export\\stage'
+cfg = {
+    'csv_file': 'CARLO.csv',
+    'clients': {'1': 'M und S Fahrzeughandel GmbH'},
+    'date_filter': "'2018-01-01'",
+    'source_dsn': {'user': 'sa', 'pass': 'Mffu3011#', 'server': 'GC-SERVER1\\GLOBALCUBE', 'database': 'DE0017', 'driver': 'mssql'},
+    'source_schema': 'dbo',
+    'target_dsn': {'user': 'sa', 'pass': 'Mffu3011#', 'server': 'GC-SERVER1\\GLOBALCUBE', 'database': 'CARLO2', 'driver': 'mssql'},
+    'target_schema': 'import',
+    # stage_dir = "\\\\gc-server1\Austausch\\stage",
+    'stage_dir': 'C:\\GlobalCube\\System\\CARLO\\Export\\stage'
+}
 
 
 def db_import(select_query, source_db, current_table, target_db, target_schema):
     pd.read_sql(select_query, source_db).to_sql(current_table['target'], target_db, schema=target_schema, index=False, if_exists='append')
 
 
-def conn_string(dsn):
-    return f"mssql+pyodbc://{dsn['user']}:{dsn['pass']}@{dsn['server']}/{dsn['database']}?driver=SQL+Server+Native+Client+11.0"
-
-
-def conn_params(dsn):
-    return f"-S {dsn['server']} -d {dsn['database']} -U {dsn['user']} -P {dsn['pass']}"
-
-
-df = pd.read_csv(csv_file, sep=';', encoding='ansi')
+df = pd.read_csv(cfg['csv_file'], sep=';', encoding='ansi')
 config = df[df['target'].notnull()]
 print(config.head())
 
-source_db = create_engine(conn_string(source_dsn))
+source_db = create_engine(conn_string(cfg['source_dsn']))
 source_insp = inspect(source_db)
-source_tables = source_insp.get_table_names(schema=source_schema)
+source_tables = source_insp.get_table_names(schema=cfg['source_schema'])
 source_tables_prefix = set([t.split('$')[0] for t in source_tables if '$' in t])
 print(source_tables_prefix)
 
-target_db = create_engine(conn_string(target_dsn))
+target_db = create_engine(conn_string(cfg['target_dsn']))
 target_insp = inspect(target_db)
-target_tables = target_insp.get_table_names(schema=target_schema)
+target_tables = target_insp.get_table_names(schema=cfg['target_schema'])
 
 
 for index, current_table in config.iterrows():
-    with open(stage_dir + "\\batch\\" + current_table['target'] + '.bat', 'w', encoding='cp850') as f:
+    with open(cfg['stage_dir'] + "\\batch\\" + current_table['target'] + '.bat', 'w', encoding='cp850') as f:
         f.write('@echo off \n')
         f.write('rem ==' + current_table['target'] + '==\n')
 
@@ -50,28 +43,28 @@ for index, current_table in config.iterrows():
             f.write(f"echo Ziel-Tabelle '{current_table['target']}' existiert nicht!\n")
             continue
 
-        f.write(f"del {stage_dir}\\{current_table['target']}*.* /Q /F >nul 2>nul \n")
-        f.write(f"sqlcmd.exe {conn_params(target_dsn)} -p -Q \"TRUNCATE TABLE [{target_schema}].[{current_table['target']}]\" \n")
+        f.write(f"del {cfg['stage_dir']}\\{current_table['target']}*.* /Q /F >nul 2>nul \n")
+        f.write(f"sqlcmd.exe {bcp_conn_params(cfg['target_dsn'])} -p -Q \"TRUNCATE TABLE [{cfg['target_schema']}].[{current_table['target']}]\" \n")
 
-        target_insp_cols = target_insp.get_columns(current_table['target'], schema=target_schema)
+        target_insp_cols = target_insp.get_columns(current_table['target'], schema=cfg['target_schema'])
         target_columns_list = [col['name'] for col in target_insp_cols]
         target_columns = set(target_columns_list)
 
-        for client_db, prefix in clients.items():
+        for client_db, prefix in cfg['clients'].items():
             source_table = current_table['source'].format(prefix)
 
             if source_table not in source_tables:
                 f.write(f"echo Quell-Tabelle '{source_table}' existiert nicht!\n")
                 continue
-            stage_csv = f"{stage_dir}\\{current_table['target']}_{client_db}.csv"
+            stage_csv = f"{cfg['stage_dir']}\\{current_table['target']}_{client_db}.csv"
 
             if not pd.isnull(current_table['query']):
-                select_query = current_table['query'].format(prefix, date_filter)
+                select_query = current_table['query'].format(prefix, cfg['date_filter'])
             else:
-                select_query = f"SELECT T1.* FROM [{source_schema}].[{source_table}] T1 "
+                select_query = f"SELECT T1.* FROM [{cfg['source_schema']}].[{source_table}] T1 "
 
             if not pd.isnull(current_table['filter']):
-                select_query += " WHERE " + current_table['filter'].format("", date_filter)
+                select_query += " WHERE " + current_table['filter'].format("", cfg['date_filter'])
 
             source_insp_cols = source_insp.get_columns(source_table)
             source_columns = set([col['name'] for col in source_insp_cols])
@@ -101,16 +94,19 @@ for index, current_table in config.iterrows():
 
             select_query = select_query.replace("T1.*", select_columns[:-2])
             select_query = select_query.replace("%", "%%")     # batch-Problem
+            # '\\n' keeps the backslash literal in the SQL; a bare '\n' would embed a real newline
+            insert_query = f"LOAD DATA INFILE '{stage_csv}' INTO TABLE {current_table['target']} FIELDS TERMINATED BY ',' ENCLOSED BY '\"' LINES TERMINATED BY '\\n';"
+
             # print(select_query)
-            f.write(f"bcp \"{select_query}\" queryout \"{stage_csv}\" {conn_params(source_dsn)} -c -C 65001 -e \"{stage_csv[:-4]}.queryout.log\" > \"{stage_csv[:-4]}.bcp1.log\" \n")
+            f.write(f"bcp \"{select_query}\" queryout \"{stage_csv}\" {bcp_conn_params(cfg['source_dsn'])} -c -C 65001 -e \"{stage_csv[:-4]}.queryout.log\" > \"{stage_csv[:-4]}.bcp1.log\" \n")
             f.write(f"type \"{stage_csv[:-4]}.bcp1.log\" | findstr -v \"1000\" \n")
-            f.write(f"bcp [{target_schema}].[{current_table['target']}] in \"{stage_csv}\" {conn_params(target_dsn)} -c -C 65001 -e \"{stage_csv[:-4]}.in.log\" > \"{stage_csv[:-4]}.bcp2.log\" \n")
+            f.write(f"bcp [{cfg['target_schema']}].[{current_table['target']}] in \"{stage_csv}\" {bcp_conn_params(cfg['target_dsn'])} -c -C 65001 -e \"{stage_csv[:-4]}.in.log\" > \"{stage_csv[:-4]}.bcp2.log\" \n")
             f.write(f"type \"{stage_csv[:-4]}.bcp2.log\" | findstr -v \"1000\" \n")
 
 
-with open(stage_dir + "\\batch\\_all.bat", "w", encoding="cp850") as f:
+with open(cfg['stage_dir'] + "\\batch\\_all.bat", "w", encoding="cp850") as f:
     f.write("@echo off & cd /d %~dp0 \n")
-    f.write(f"del {stage_dir}\\*.* /Q /F >nul 2>nul \n\n")
+    f.write(f"del {cfg['stage_dir']}\\*.* /Q /F >nul 2>nul \n\n")
     for index, current_table in config.iterrows():
         f.write("echo ==" + current_table['target'] + "==\n")
         f.write("echo " + current_table['target'] + " >CON \n")

+ 0 - 0
gctools/publish-single-report.mac → gctools/powerplay-export.mac


+ 0 - 0
gctools/publish-single-report.mcx → gctools/powerplay-export.mcx


+ 2 - 2
gctools/powerplay.py

@@ -11,8 +11,8 @@ publish_dir = cfg.portal_dir + '\\daten'
 @plac.opt('export_format', '', type=str)
 @plac.opt('publish_subdir', '', type=str)
 def export(report_file, export_format='pdf', publish_subdir=None):
-    pub_dir = publish_dir if publish_subdir is None else publish_dir + '\\' + publish_subdir
-    cmd = f'"{cfg.cognos_dir}\\runmac32.exe" "{cfg.tools_dir}\\publish-single-report.mac" "{report_dir}","{report_file}","{export_format}","{pub_dir}"'
+    pub_dir = publish_dir if publish_subdir is None else f'{publish_dir}\\{publish_subdir}'
+    cmd = f'"{cfg.cognos_dir}\\runmac32.exe" "{cfg.tools_dir}\\powerplay-export.mac" "{report_dir}","{report_file}","{export_format}","{pub_dir}"'
     print(f"Exportiere '{report_file}' als '{export_format}' nach '{pub_dir}'...", end='')
 
     p = subprocess.Popen(cmd)
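
For reference, a standalone sketch of the command string the renamed call assembles for a hypothetical report (all directories below are assumptions; the real cfg.* values are installation-specific):

    cognos_dir = r'C:\Cognos'                # assumption for cfg.cognos_dir
    tools_dir = r'C:\GlobalCube\Tools'       # assumption for cfg.tools_dir
    report_dir = r'C:\GlobalCube\Reports'    # assumption
    pub_dir = r'C:\GlobalCube\Portal\daten\reports'
    cmd = (f'"{cognos_dir}\\runmac32.exe" "{tools_dir}\\powerplay-export.mac" '
           f'"{report_dir}","monthly.ppr","pdf","{pub_dir}"')
    print(cmd)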

+ 0 - 4
powerplay.py

@@ -1,4 +0,0 @@
-import win32com.client
-tr = win32com.client.Dispatch('CognosTransformer.Application')
-print(tr.Version)
-tr.Visible = 1

+ 0 - 22
powerplay.vbs

@@ -1,22 +0,0 @@
-Sub Main()
-    Dim objPPRep as Object
-    Dim objRepPrt as Object
-    Set objPPRep = CreateObject ("CognosPowerPlay.Report")
-    objPPRep.Open "C:\CUBES UND BERICHTE\BEISPIEL2.PPR"
-    Set objRepPrt = objPPRep.Print
-    objRepPrt.PrintAllCharts = False
-    objRepPrt.SetListOfRowsToPrint objPPrep.Rows
-    objRepPrt.SetListOfLayersToPrint objPPRep.Layers
-    objRepPrt.SetChartToPrint objPPRep.Graphs.Item(2)
-    objRepPrt.IncludeLegend = True
-    objRepPrt.ChartTitleOnAllPages = True
-    objRepPrt.SummariesOnAllPages = True
-    objRepPrt.AxisOnAllPages = True
-    objRepPrt.Collate = True
-    objRepPrt.Copies = 1
-    objRepPrt.PrintOut
-    objRepPrt.Copies = 2
-    objRepPrt.PrintOut
-    Set objRepPrt = Nothing
-    Set objPPRep = Nothing
-End Sub