Fix sending bigint from backend (skuid serialized as string for the JS frontend); restore db from an exported file for table item. TODO: restore the other tables.

This commit is contained in:
Sam Hadow 2024-03-03 00:11:59 +01:00
parent 4f069bfb20
commit 960fbf80fb

View File

@ -381,6 +381,12 @@ def fetch_all():
cursor.close()
connection.commit()
connection.close()
# bigint fix (JS frontend) skuid in table item
converted_rows = []
for row in results[0]:
converted_row = (row[0], row[1], str(row[2]), row[3], row[4], row[5], row[6])
converted_rows.append(converted_row)
results[0] = converted_rows
return results
def restore_db(data):
    '''Restore database rows from an exported dict.

    data: mapping of table name -> list of row dicts; currently only the
    "item" table is actually restored (TODO: other tables).
    Each item dict is expected to carry the keys itemid, skuid, choice,
    attributes, image and show — assumed to match the export format,
    TODO confirm against export_csv/fetch_all.
    '''
    connection = connect_db()
    cursor = connection.cursor()
    for elem in data["item"]:
        # Skip rows that already exist (same itemid + skuid).
        # Parameterized query: elem values come from an external file and
        # must never be interpolated into the SQL string directly.
        cursor.execute(
            'SELECT 1 FROM item WHERE itemid=%s AND skuid=%s',
            (elem["itemid"], elem["skuid"]),
        )
        if cursor.rowcount == 0:
            # attributes is stored as a comma-separated string in the export;
            # pass a Python list so the driver adapts it to a text[] array
            # (an empty list becomes an empty array).
            raw_attributes = str(elem["attributes"])
            attributes = raw_attributes.split(',') if raw_attributes else []
            cursor.execute(
                'INSERT INTO item (uuid, itemid, skuid, choice, attributes, image, show) '
                "VALUES (nextval('uuid_sequence'), %s, %s, %s, %s, %s, %s)",
                (
                    elem["itemid"],
                    elem["skuid"],
                    elem["choice"],
                    attributes,
                    elem["image"],
                    elem["show"],
                ),
            )
    # TODO: restore the remaining tables; placeholder loop, no writes yet.
    for table in tables:
        for elem in data[table]:
            pass
    cursor.close()
    connection.commit()
    connection.close()
def export_csv():
'''join item and history data from database and export it in ./output.csv'''