# schema.py
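#
# Read table names from tables.txt, query the Autoline ODBC source for
# column and primary-key metadata, and emit SQL Server CREATE TABLE scripts
# for the [import] schema. Intermediate results are dumped to schema.json
# and pkeys.json.
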
import pyodbc
import json


def connect():
    c = pyodbc.connect("DSN=Autoline_direkt64;UID=kcc;PWD=kcc123")
    return c.cursor()
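

# Map an ODBC column description row (from Cursor.columns) to a SQL Server
# column definition. Unknown type names fall through to a raw dump of the
# row, so they show up in schema.json for manual review.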
def convert_desc(col):
    nullable = "NULL" if col.nullable == 1 else "NOT NULL"
    if col.type_name == 'CHAR':
        return f' [{col.column_name}] [varchar]({col.length}) {nullable}'
    if col.type_name == 'HEX':
        return f' [{col.column_name}] [binary]({col.length}) {nullable}'
    if col.type_name == 'INTEGER':
        return f' [{col.column_name}] [int] {nullable}'
    if col.type_name == 'DATE':
        return f' [{col.column_name}] [datetime] {nullable}'
    if col.type_name == 'NUMERIC':
        return f' [{col.column_name}] [decimal]({col.length},{col.scale}) {nullable}'
    if col.type_name == 'BIT':
        # SQL Server has no [boolean] type; [bit] is its boolean column type.
        return f' [{col.column_name}] [bit] {nullable}'
    if col.type_name == 'MEMO':
        return f' [{col.column_name}] [text] {nullable}'
    return ", ".join(map(str, col))
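

# Fallback formatter: stringify every field of the ODBC row as-is.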
def convert_desc2(col):
    return ", ".join(map(str, col))


# table_name = [x[2] for x in crsr.tables(tableType='VIEW')]
# open("views.txt", "w").write("\n".join(table_name))
with open("tables.txt", "r") as rh:
    tables = rh.read().split("\n")

res = {}
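

# Collect the column definitions of every table into res, then dump them to
# schema.json. On a pyodbc error the cursor may be left unusable, so the
# failed table gets an empty column list and the connection is re-opened.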
def tables_cols():
    crsr = connect()
    for t in tables:
        try:
            cols = crsr.columns(table=t)
            # print([x[0] for x in crsr.description])
            res[t] = [convert_desc(c) for c in cols]
            crsr.cancel()
        except pyodbc.Error as e:
            print(e)
            if t != '':
                res[t] = []
            crsr = connect()
    with open("schema.json", "w") as wh:
        json.dump(res, wh, indent=2)
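

# Append a clustered primary-key constraint to each table's column list, then
# dump the combined definitions to pkeys.json.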
def pkeys():
    crsr = connect()
    for t in tables:
        try:
            cols = crsr.primaryKeys(table=t)
            # print([x[0] for x in crsr.description])
            if res.get(t) is None:
                res[t] = []
            res[t].append(f" CONSTRAINT [{t}$0] PRIMARY KEY CLUSTERED (["
                          + "], [".join([c.column_name for c in cols]) + "])")
            crsr.cancel()
        except pyodbc.Error as e:
            print(e)
            # Don't wipe column definitions already collected for this table.
            if t != '' and res.get(t) is None:
                res[t] = []
            crsr = connect()
    with open("pkeys.json", "w") as wh:
        json.dump(res, wh, indent=2)
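

# Foreign-key metadata is only dumped raw via convert_desc2 and nothing is
# written to disk here; the call in __main__ is commented out.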
def fkeys():
    crsr = connect()
    for t in tables:
        try:
            cols = crsr.foreignKeys(table=t)
            print([x[0] for x in crsr.description])
            if res.get(t) is None:
                res[t] = []
            res[t].append([convert_desc2(c) for c in cols])
            crsr.cancel()
        except pyodbc.Error as e:
            print(e)
            # Don't wipe column definitions already collected for this table.
            if t != '' and res.get(t) is None:
                res[t] = []
            crsr = connect()
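

# Emit one CREATE TABLE script per table into the sql_load schema tree.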
def tables_create():
    for t, cols in res.items():
        with open("../sql_load/schema/AUTOLINE/tables/import." + t + ".sql", "w") as wh:
            wh.write(f"CREATE TABLE [import].[{t}] (\n")
            wh.write(",\n".join(cols))
            wh.write(")\n\nGO\n")


if __name__ == '__main__':
    tables_cols()
    pkeys()
    # fkeys()
    tables_create()