如何使用基于json的python创建新表并向表中插入数据
我正在尝试创建一个新的数据库表,并将数据插入该表中。例如,我有两个数据表,分别是表列名和列数据类型。我想创建这两个表并插入数据以更正表名。我使用了带有连接参数的python函数。请求你的建议来实现这一点 json数据 Python代码 嘿, 我已经对它进行了修改和测试。 以下是重构代码:如何使用基于json的python创建新表并向表中插入数据,python,json,psql,Python,Json,Psql,我正在尝试创建一个新的数据库表,并将数据插入该表中。例如,我有两个数据表,分别是表列名和列数据类型。我想创建这两个表并插入数据以更正表名。我使用了带有连接参数的python函数。请求你的建议来实现这一点 json数据 Python代码 嘿, 我已经对它进行了修改和测试。 以下是重构代码: import json import psycopg2 from psycopg2 import sql from psycopg2.extensions import ISOLATION_LEVEL_AUT
import json
import psycopg2
from psycopg2 import sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def create_table(data_JSON):
    """Create the tables described by a JSON payload and insert their rows.

    data_JSON: a JSON string whose first element is an object with keys
      - 'dbName':      name of the PostgreSQL database to connect to
      - 'tableName':   list of table names
      - 'tableColumn': per-table list of {'title': ..., 'type': ...} column specs
      - 'tableData':   per-table list of row dicts (column name -> value)

    Returns True on success, False on any error (the error is printed).
    """
    try:
        filedata = json.loads(data_JSON)
        db_name = filedata[0]['dbName']
        print(db_name)
        con = psycopg2.connect(user='postgres', host='127.0.0.1',
                               database=db_name, password='password')
        print("Opened database successfully")
        # Autocommit: each DDL/DML statement commits on its own, so the
        # explicit con.commit() the original called is unnecessary.
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            cur = con.cursor()
            table_names = filedata[0]['tableName']
            table_columns = filedata[0]['tableColumn']
            table_datum = filedata[0]['tableData']
            for table_name, table_column, table_data in zip(table_names,
                                                            table_columns,
                                                            table_datum):
                # SECURITY FIX: quote identifiers with sql.Identifier instead
                # of f-string interpolation, to prevent SQL injection through
                # table/column names in the JSON.
                # NOTE(review): the column *type* cannot be parameterized and
                # is still spliced in verbatim -- the payload must be trusted.
                columns = sql.SQL(', ').join(
                    sql.SQL('{} {}').format(sql.Identifier(td['title']),
                                            sql.SQL(td['type']))
                    for td in table_column
                )
                cur.execute(sql.SQL('CREATE TABLE {} ({});').format(
                    sql.Identifier(table_name), columns))
                for row in table_data:
                    # Parameterized VALUES: psycopg2 escapes/quotes the data,
                    # instead of wrapping raw values in single quotes.
                    keys = sql.SQL(', ').join(sql.Identifier(k) for k in row)
                    placeholders = sql.SQL(', ').join(
                        sql.Placeholder() * len(row))
                    cur.execute(
                        sql.SQL('INSERT INTO {} ({}) VALUES ({});').format(
                            sql.Identifier(table_name), keys, placeholders),
                        list(row.values()))
            cur.close()
        finally:
            # LEAK FIX: the original left the connection open when a
            # statement raised; always release it.
            con.close()
    except Exception as e:
        print(e)
        return False
    return True
这是一些代码。如果你遇到错误,请把错误信息写下来 :) 我没有遇到任何错误。由于你没有打印异常,请尝试把
`except Exception: return False`
改为
`except Exception as e: print(e)`,然后
现在也显示错误…验证你的数据库,那么为什么这段代码可以工作?使用给定的JSON数据进行测试。如果它没有回答你的问题,那么它就不理解你的问题。它绝对像我尝试的那样起作用。你让我开心。谢谢@sindbadaways欢迎兄弟。
import json
import psycopg2
from psycopg2 import sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def create_table(data_JSON):
    """Create the tables described by a JSON payload and insert their rows.

    data_JSON: a JSON string whose first element is an object with keys
      - 'dbName':      name of the PostgreSQL database to connect to
      - 'tableName':   list of table names
      - 'tableColumn': per-table list of {'title': ..., 'type': ...} column specs
      - 'tableData':   per-table list of row dicts (column name -> value)

    Returns True on success, False on any error (the error is printed).

    BUG FIXES vs. the original draft: it referenced an undefined `data_dict`
    (should be `filedata`), used a bare name `title` (NameError; should be the
    key 'title'), assigned `datatype` but read `data_type`, looked up the key
    'dbname' where the payload uses 'dbName', overwrote the column name/type
    on every loop pass so only the last column survived, and its bare
    `except Exception: return False` silently discarded all diagnostics.
    """
    try:
        filedata = json.loads(data_JSON)
        db_name = filedata[0]['dbName']
        con = psycopg2.connect(user='postgres', host='127.0.0.1',
                               database=db_name, password='test@123')
        # Autocommit so DDL takes effect immediately, statement by statement.
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            cur = con.cursor()
            # Walk the three parallel lists together instead of restarting
            # nested loops that clobber each other's variables.
            for sheet_name, col_specs, rows in zip(filedata[0]['tableName'],
                                                   filedata[0]['tableColumn'],
                                                   filedata[0]['tableData']):
                # Identifiers are quoted via sql.Identifier (injection-safe);
                # the type text itself must come from a trusted payload.
                columns = sql.SQL(', ').join(
                    sql.SQL('{} {}').format(sql.Identifier(spec['title']),
                                            sql.SQL(spec['type']))
                    for spec in col_specs
                )
                cur.execute(sql.SQL('CREATE TABLE {} ({});').format(
                    sql.Identifier(sheet_name), columns))
                for row in rows:
                    keys = sql.SQL(', ').join(sql.Identifier(k) for k in row)
                    placeholders = sql.SQL(', ').join(
                        sql.Placeholder() * len(row))
                    # Values travel as bound parameters, never as raw SQL text.
                    cur.execute(
                        sql.SQL('INSERT INTO {} ({}) VALUES ({});').format(
                            sql.Identifier(sheet_name), keys, placeholders),
                        list(row.values()))
            cur.close()
        finally:
            # Always release the connection, even when a statement fails.
            con.close()
    except Exception as e:
        # Surface the error instead of silently returning False.
        print(e)
        return False
    return True
import json
import psycopg2
from psycopg2 import sql
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
def _table_ddl(table_name, col_specs):
    """Compose an injection-safe CREATE TABLE statement for one table.

    Column names are quoted via sql.Identifier; the type text is spliced in
    verbatim and therefore must come from a trusted payload.
    """
    columns = sql.SQL(', ').join(
        sql.SQL('{} {}').format(sql.Identifier(spec['title']),
                                sql.SQL(spec['type']))
        for spec in col_specs
    )
    return sql.SQL('CREATE TABLE {} ({});').format(
        sql.Identifier(table_name), columns)


def _insert_row(cur, table_name, row):
    """Insert one row dict using bound parameters (no manual quoting)."""
    keys = sql.SQL(', ').join(sql.Identifier(k) for k in row)
    placeholders = sql.SQL(', ').join(sql.Placeholder() * len(row))
    cur.execute(
        sql.SQL('INSERT INTO {} ({}) VALUES ({});').format(
            sql.Identifier(table_name), keys, placeholders),
        list(row.values()))


def create_table(data_JSON):
    """Create the tables described by a JSON payload and insert their rows.

    data_JSON: a JSON string whose first element is an object with keys
      'dbName', 'tableName', 'tableColumn', 'tableData' (parallel lists).

    Returns True on success, False on any error (the error is printed).

    SECURITY FIX vs. the original: identifiers and values were interpolated
    into f-string SQL (injection-prone); they now go through psycopg2.sql
    composition and bound parameters. Also fixes a connection leak when a
    statement raised mid-loop.
    """
    try:
        filedata = json.loads(data_JSON)
        db_name = filedata[0]['dbName']
        print(db_name)
        con = psycopg2.connect(user='postgres', host='127.0.0.1',
                               database=db_name, password='password')
        print("Opened database successfully")
        # Autocommit: each statement commits on its own; no commit() needed.
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        try:
            cur = con.cursor()
            for table_name, col_specs, rows in zip(filedata[0]['tableName'],
                                                   filedata[0]['tableColumn'],
                                                   filedata[0]['tableData']):
                cur.execute(_table_ddl(table_name, col_specs))
                for row in rows:
                    _insert_row(cur, table_name, row)
            cur.close()
        finally:
            # Release the connection even when a statement fails.
            con.close()
    except Exception as e:
        print(e)
        return False
    return True