# db_schema.py — dumps the Autoline ODBC schema to JSON and to per-table
# T-SQL CREATE TABLE scripts.
  1. import pyodbc
  2. import json
  3. def connect():
  4. c = pyodbc.connect("DSN=Autoline_direkt64;UID=kcc;PWD=kcc123")
  5. return c.cursor()
  6. def convert_desc(col):
  7. nullable = "NULL" if col.nullable == 1 else "NOT NULL"
  8. if col.type_name == 'CHAR':
  9. return f' [{col.column_name}] [varchar]({col.length}) {nullable}'
  10. if col.type_name == 'HEX':
  11. return f' [{col.column_name}] [binary]({col.length}) {nullable}'
  12. if col.type_name == 'INTEGER':
  13. return f' [{col.column_name}] [int] {nullable}'
  14. if col.type_name == 'DATE':
  15. return f' [{col.column_name}] [datetime] {nullable}'
  16. if col.type_name == 'NUMERIC':
  17. return f' [{col.column_name}] [decimal]({col.length},{col.scale}) {nullable}'
  18. if col.type_name == 'BIT':
  19. return f' [{col.column_name}] [boolean] {nullable}'
  20. if col.type_name == 'MEMO':
  21. return f' [{col.column_name}] [text] {nullable}'
  22. return ", ".join(list(map(str, col)))
  23. def convert_desc2(col):
  24. return ", ".join(list(map(str, col)))
# One-off helper used to produce views.txt (kept for reference):
# table_name = [x[2] for x in crsr.tables(tableType='VIEW')]
# open("views.txt", "w").write("\n".join(table_name))
# Autoline table names, one per line; a trailing newline in tables.txt
# yields an '' entry, which the workers below guard against.
with open("tables.txt", "r") as rh:
    tables = rh.read().split("\n")
# Accumulates table name -> list of T-SQL column/constraint fragments;
# shared by tables_cols(), pkeys(), fkeys() and tables_create().
res = {}
  30. def tables_cols():
  31. crsr = connect()
  32. for t in tables:
  33. try:
  34. cols = crsr.columns(table=t)
  35. # print([x[0] for x in crsr.description])
  36. res[t] = [convert_desc(c) for c in cols]
  37. crsr.cancel()
  38. except pyodbc.Error as e:
  39. print(e)
  40. if t != '':
  41. res[t] = []
  42. crsr = connect()
  43. json.dump(res, open("schema.json", "w"), indent=2)
  44. def pkeys():
  45. crsr = connect()
  46. for t in tables:
  47. try:
  48. cols = crsr.primaryKeys(table=t)
  49. # print([x[0] for x in crsr.description])
  50. if res.get(t) is None:
  51. res[t] = []
  52. res[t].append(
  53. f" CONSTRAINT [{t}$0] PRIMARY KEY CLUSTERED ([" + "], [".join([c.column_name for c in cols]) + "])")
  54. crsr.cancel()
  55. except pyodbc.Error as e:
  56. print(e)
  57. if t != '':
  58. res[t] = []
  59. crsr = connect()
  60. json.dump(res, open("pkeys.json", "w"), indent=2)
  61. def fkeys():
  62. crsr = connect()
  63. for t in tables:
  64. try:
  65. cols = crsr.foreignKeys(table=t)
  66. print([x[0] for x in crsr.description])
  67. if res.get(t) is None:
  68. res[t] = []
  69. res[t].append([convert_desc2(c) for c in cols])
  70. crsr.cancel()
  71. except pyodbc.Error as e:
  72. print(e)
  73. if t != '':
  74. res[t] = []
  75. crsr = connect()
  76. def tables_create():
  77. for t, cols in res.items():
  78. with open("../sql_load/schema/AUTOLINE/tables/import." + t + ".sql", "w") as wh:
  79. wh.write(f"CREATE TABLE [import].[{t}] (\n")
  80. wh.write(",\n".join(cols))
  81. wh.write(")\n\nGO\n")
if __name__ == '__main__':
    # Order matters: tables_cols() seeds `res` with column definitions,
    # pkeys() appends the PK constraints, tables_create() then writes
    # the combined fragments out as CREATE TABLE scripts.
    tables_cols()
    pkeys()
    # fkeys()
    tables_create()