# db_schema.py — read Autoline table names from tables.txt, pull column /
# primary-key metadata over ODBC (pyodbc), and emit SQL Server
# CREATE TABLE scripts plus JSON dumps of the collected schema.
  1. import pyodbc
  2. import json
  3. def connect():
  4. c = pyodbc.connect("DSN=Autoline_direkt64;UID=kcc;PWD=kcc123")
  5. return c.cursor()
  6. def convert_desc(col):
  7. nullable = "NULL" if col.nullable == 1 else "NOT NULL"
  8. if col.type_name == "CHAR":
  9. return f" [{col.column_name}] [varchar]({col.length}) {nullable}"
  10. if col.type_name == "HEX":
  11. return f" [{col.column_name}] [binary]({col.length}) {nullable}"
  12. if col.type_name == "INTEGER":
  13. return f" [{col.column_name}] [int] {nullable}"
  14. if col.type_name == "DATE":
  15. return f" [{col.column_name}] [datetime] {nullable}"
  16. if col.type_name == "NUMERIC":
  17. return f" [{col.column_name}] [decimal]({col.length},{col.scale}) {nullable}"
  18. if col.type_name == "BIT":
  19. return f" [{col.column_name}] [boolean] {nullable}"
  20. if col.type_name == "MEMO":
  21. return f" [{col.column_name}] [text] {nullable}"
  22. return ", ".join(list(map(str, col)))
  23. def convert_desc2(col):
  24. return ", ".join(list(map(str, col)))
# One-off helper used to produce views.txt (kept for reference):
# table_name = [x[2] for x in crsr.tables(tableType='VIEW')]
# open("views.txt", "w").write("\n".join(table_name))

# Module-level setup: read the list of table names, one per line.
# NOTE: split("\n") keeps a trailing "" entry when the file ends with a
# newline — the loops below guard against it with `if t != ""`.
with open("tables.txt", "r") as rh:
    tables = rh.read().split("\n")
# Shared accumulator: table name -> list of column/constraint definition lines.
res = {}
  30. def tables_cols():
  31. crsr = connect()
  32. for t in tables:
  33. try:
  34. cols = crsr.columns(table=t)
  35. # print([x[0] for x in crsr.description])
  36. res[t] = [convert_desc(c) for c in cols]
  37. crsr.cancel()
  38. except pyodbc.Error as e:
  39. print(e)
  40. if t != "":
  41. res[t] = []
  42. crsr = connect()
  43. json.dump(res, open("schema.json", "w"), indent=2)
  44. def pkeys():
  45. crsr = connect()
  46. for t in tables:
  47. try:
  48. cols = crsr.primaryKeys(table=t)
  49. # print([x[0] for x in crsr.description])
  50. if res.get(t) is None:
  51. res[t] = []
  52. res[t].append(
  53. f" CONSTRAINT [{t}$0] PRIMARY KEY CLUSTERED (["
  54. + "], [".join([c.column_name for c in cols])
  55. + "])"
  56. )
  57. crsr.cancel()
  58. except pyodbc.Error as e:
  59. print(e)
  60. if t != "":
  61. res[t] = []
  62. crsr = connect()
  63. json.dump(res, open("pkeys.json", "w"), indent=2)
  64. def fkeys():
  65. crsr = connect()
  66. for t in tables:
  67. try:
  68. cols = crsr.foreignKeys(table=t)
  69. print([x[0] for x in crsr.description])
  70. if res.get(t) is None:
  71. res[t] = []
  72. res[t].append([convert_desc2(c) for c in cols])
  73. crsr.cancel()
  74. except pyodbc.Error as e:
  75. print(e)
  76. if t != "":
  77. res[t] = []
  78. crsr = connect()
  79. def tables_create():
  80. for t, cols in res.items():
  81. with open("../sql_load/schema/AUTOLINE/tables/import." + t + ".sql", "w") as wh:
  82. wh.write(f"CREATE TABLE [import].[{t}] (\n")
  83. wh.write(",\n".join(cols))
  84. wh.write(")\n\nGO\n")
def schema():
    """Run the full export pipeline in order: collect column definitions,
    append primary-key constraints, then write the CREATE TABLE scripts.
    fkeys() is exploratory and deliberately left disabled."""
    tables_cols()
    pkeys()
    # fkeys()
    tables_create()