-
-
Notifications
You must be signed in to change notification settings - Fork 74
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Write speed of database seems extremely slow #66
Comments
Due to the overhead of Godot and GDNative, I think you won't ever get the same speed as in the DB browser. Speed & performance can however be optimized quite a bit by ditching the for-loop in the example and switching to the `insert_rows()` method. I'll do some tests and I'll get back to you with the speeds on my device. |
Hey @TempestStorm Here are the results on my device (all in milliseconds): Writing benchmarks:insert_row() : 23.75804 msec Reading benchmarks:select_rows() : 3.76736 msec I would expect the Here's the code I've used (could you run this on your device and report back?) extends Node
const N_EXPERIMENTS := 100
const SQLite = preload("res://addons/godot-sqlite/bin/gdsqlite.gdns")
var db_name := "user://test.db"
var file_name := "user://test.json"
var table_name := "company"
var ids := [1,2,3,4,5,6,7]
var names := ["Paul","Allen","Teddy","Mark","Robert","Julia","Amanda"]
var ages := [32,25,23,25,30,63,13]
var addresses := ["California","Texas","Baltimore","Richmond","Texas","Atlanta","New-York"]
var salaries := [20000.00,15000.00,20000.00,65000.00,65000.00,65000.00,65000.00]
var dir := Directory.new()
func _ready():
var insert_row_delta := 0.0
var insert_rows_delta := 0.0
var json_write_delta := 0.0
for _i in range(0, N_EXPERIMENTS):
insert_row_delta += insert_row_benchmark()
insert_rows_delta += insert_rows_benchmark()
json_write_delta += json_write_benchmark()
insert_row_delta = insert_row_delta/N_EXPERIMENTS
insert_rows_delta = insert_rows_delta/N_EXPERIMENTS
json_write_delta = json_write_delta/N_EXPERIMENTS
var select_rows_delta := 0.0
var json_read_delta := 0.0
for _i in range(0, N_EXPERIMENTS):
select_rows_delta += select_rows_benchmark()
json_read_delta += json_read_benchmark()
select_rows_delta = select_rows_delta/N_EXPERIMENTS
json_read_delta = json_read_delta/N_EXPERIMENTS
print(insert_row_delta)
print(insert_rows_delta)
print(json_write_delta)
print(select_rows_delta)
print(json_read_delta)
func insert_row_benchmark() -> int:
var start := OS.get_ticks_usec()
var db = SQLite.new()
db.path = db_name
db.open_db()
var table_dict : Dictionary = Dictionary()
table_dict["id"] = {"data_type":"int", "primary_key": true, "not_null": true}
table_dict["name"] = {"data_type":"text", "not_null": true}
table_dict["age"] = {"data_type":"int", "not_null": true}
table_dict["address"] = {"data_type":"char(50)"}
table_dict["salary"] = {"data_type":"real"}
db.create_table(table_name, table_dict)
var row_dict : Dictionary = Dictionary()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
# Insert a new row in the table
db.insert_row(table_name, row_dict)
row_dict.clear()
db.close_db()
var delta := OS.get_ticks_usec() - start
# Clean up the file
dir.remove(db_name)
return delta
func insert_rows_benchmark() -> int:
var start := OS.get_ticks_usec()
var db = SQLite.new()
db.path = db_name
db.open_db()
var table_dict : Dictionary = Dictionary()
table_dict["id"] = {"data_type":"int", "primary_key": true, "not_null": true}
table_dict["name"] = {"data_type":"text", "not_null": true}
table_dict["age"] = {"data_type":"int", "not_null": true}
table_dict["address"] = {"data_type":"char(50)"}
table_dict["salary"] = {"data_type":"real"}
db.create_table(table_name, table_dict)
var row_array : Array = []
var row_dict : Dictionary = Dictionary()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
row_array.append(row_dict.duplicate())
row_dict.clear()
db.insert_rows(table_name, row_array)
db.close_db()
var delta := OS.get_ticks_usec() - start
# Clean up the file
dir.remove(db_name)
return delta
func json_write_benchmark() -> int:
var start := OS.get_ticks_usec()
var file := File.new()
file.open(file_name, File.WRITE)
var row_array : Array = []
var row_dict : Dictionary = Dictionary()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
row_array.append(row_dict.duplicate())
row_dict.clear()
file.store_string(JSON.print(row_array, "/t"))
file.close()
var delta := OS.get_ticks_usec() - start
# Clean up the file
dir.remove(file_name)
return delta
func select_rows_benchmark() -> int:
var db = SQLite.new()
db.path = db_name
db.open_db()
var table_dict : Dictionary = Dictionary()
table_dict["id"] = {"data_type":"int", "primary_key": true, "not_null": true}
table_dict["name"] = {"data_type":"text", "not_null": true}
table_dict["age"] = {"data_type":"int", "not_null": true}
table_dict["address"] = {"data_type":"char(50)"}
table_dict["salary"] = {"data_type":"real"}
db.create_table(table_name, table_dict)
var row_array : Array = []
var row_dict : Dictionary = Dictionary()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
row_array.append(row_dict.duplicate())
row_dict.clear()
db.insert_rows(table_name, row_array)
db.close_db()
var start := OS.get_ticks_usec()
var db2 = SQLite.new()
db2.path = db_name
db2.open_db()
db2.select_rows(table_name, "name = 'Mark'", ["salary"])
db2.close_db()
var delta := OS.get_ticks_usec() - start
# Clean up the file
dir.remove(db_name)
return delta
func json_read_benchmark() -> int:
var file := File.new()
file.open(file_name, File.WRITE)
var row_array : Array = []
var row_dict : Dictionary = Dictionary()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
row_array.append(row_dict.duplicate())
row_dict.clear()
file.store_string(to_json(row_array))
file.close()
var start := OS.get_ticks_usec()
var file2 := File.new()
file2.open(file_name, File.READ)
var content = file2.get_as_text()
content = parse_json(content)
for entry in content:
if entry["name"] == "Mark":
break
file2.close()
var delta := OS.get_ticks_usec() - start
# Clean up the file
dir.remove(file_name)
return delta |
I'll check the same script in the new GDExtension API at some point to check if there's a significant increase in speed or not. |
So my benchmarks are: insert_row() : 1887988.3 I'm actually surprised by how slowly my PC can write a JSON file, and I think this indicates issues on my end; however, if we had a similar ratio, I would conclude that it has something to do with file I/O at my OS level — and it still might. Unfortunately I don't have access to a Windows computer at the moment, but I will try to run the benchmarks on other systems when I get the opportunity. Yesterday we changed our code to store the SQLite database in ":memory:" and export and load our world with JSON files. Although we haven't been able to implement any serious testing of that, starting with our test of 250 records we found a speed increase orders of magnitude faster. I'm pretty ignorant when it comes to C++, but I did install Geany this morning, so I'm interested in running a benchmark in C++ to see if the slowdown is coming through the Godot translation layer. Is that similar to what you mean by the GDExtension API? |
For the record I'm running Pop!_OS 20.04 with the default Gnome desktop. |
Currently Godot SQLite uses the GDNative API, which has been shown by many users to be quite slow due to the wrapper/translation layer. The new and unreleased GDExtension of Godot 4.0 promises to be much faster. However, I don't know how much faster that would be. Regarding the speed on your device, those benchmark numbers are in MICROseconds, so your device is actually four times faster at reading and writing JSON than mine, it seems. I would think that your issues (having a ratio of 17 thousand) might be specific to the Linux binaries, but I don't know to what extent. |
Today is July 18, 2022. Is there a new benchmark test? |
Hello again. I've recently been experimenting with sqlite in c++ and I have found that for whatever reason it is very slow with pop_os. However I think this might be mostly in the realm of user error on my part. SQLite seems to like to start up in a very conservative manner, and I've found that querying certain pragmas can speed things up a lot!
If you do re run the benchmarks it might be worth trying them with these! |
Environment:
Issue description:
We want to generate a database during the init of our game and save it to user://, however writing to the database is incredibly slow. We want to write potentially several thousand records, but even writing as few as 250 makes our startup sequence take over a minute, compared to using JSON which would take less than a second. Further, writing these same 250 records using DB Browser only takes 3 milliseconds.
Even the demo project, which only writes 7 entries (the for loop starting at line 260 of the database.gd file), can take a relatively long time.
Steps to reproduce:
Time the for loop on line 260 of the demo project. On our last run it took 943msecs and that was running the databases with verbose mode turned off
Minimal reproduction project:
`
var t1:int = OS.get_system_time_msecs()
for i in range(0,ids.size()):
row_dict["id"] = ids[i]
row_dict["name"] = names[i]
row_dict["age"] = ages[i]
row_dict["address"] = addresses[i]
row_dict["salary"] = salaries[i]
row_array.append(row_dict.duplicate())
`
Additional context
We have experimented with rolling all the queries into one query in order to batch query and this does help slightly however its still very slow.
I am aware that this might be an issue with just my system so I would encourage other people to post a benchmark to see if it is just me.
Thank you for your time.
The text was updated successfully, but these errors were encountered: