Add various GDScript, core, GUI and physics benchmarks

- Make idle and physics time measurements somewhat functional,
  given what the engine exposes.
Hugo Locurcio
2023-05-04 12:05:26 +02:00
parent 5436c128a0
commit 64b7a69e5b
17 changed files with 862 additions and 46 deletions

View File

@@ -76,13 +76,15 @@ This argument also supports globbing and can be used at the same time as `--incl
For each benchmark, the project will track how long the main thread spent setting up the scene,
then run the scene for five seconds and log the average per-frame statistics.
(All times given are in milliseconds.)
(All times given are in milliseconds. Lower values are better.)
- **Render CPU:** Average CPU time spent per frame
- **Render GPU:** Average GPU time spent per frame
- **Idle:** Currently unimplemented
- **Physics:** Currently unimplemented
- **Main Thread Time:** Time spent setting up the scene on the main thread
- **Render CPU:** Average CPU time spent rendering each frame (such as setting up draw calls).
This metric does *not* take process/physics process functions into account.
- **Render GPU:** Average GPU time spent per frame.
- **Idle:** Average CPU time spent in C++ and GDScript process functions per second.
- **Physics:** Average CPU time spent in C++ and GDScript physics process functions per second.
- **Main Thread Time:** Time spent setting up the scene on the main thread.
For rendering benchmarks, this acts as a loading time measurement.
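The Idle and Physics figures above are read from Godot's built-in performance monitors, while the Render CPU/GPU figures come from `RenderingServer` frame-time measurements. Below is a minimal, hedged sketch of how such per-frame sampling can look; the `results` dictionary and function name are illustrative, and the manager script changed later in this commit does the real bookkeeping.

# Illustrative sketch, not the project's exact code.
# `results` is assumed to hold numeric `render_cpu`, `render_gpu`, `idle` and `physics` entries.
# Assumes render-time measurement was enabled beforehand with:
#   RenderingServer.viewport_set_measure_render_time(viewport_rid, true)
func _sample_frame_metrics(results: Dictionary, viewport_rid: RID) -> void:
	# Render times are accumulated every frame and divided by the captured frame count later.
	results.render_cpu += RenderingServer.viewport_get_measured_render_time_cpu(viewport_rid) \
			+ RenderingServer.get_frame_setup_time_cpu()
	results.render_gpu += RenderingServer.viewport_get_measured_render_time_gpu(viewport_rid)
	# The idle/physics monitors are already per-second averages reported in seconds,
	# so convert to milliseconds and keep the highest value seen during the run.
	results.idle = maxf(results.idle, Performance.get_monitor(Performance.TIME_PROCESS) * 1000)
	results.physics = maxf(results.physics, Performance.get_monitor(Performance.TIME_PHYSICS_PROCESS) * 1000)

Only the accumulated render times are averaged over the captured frame count at the end; the idle and physics values are reported as-is, since the engine already averages them per second.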
Note that not all benchmarks involve running a scene (for example, GDScript benchmarks).
In those cases, per-frame statistics will not be recorded,

View File

@@ -0,0 +1,27 @@
extends Benchmark
signal emitter
const ITERATIONS = 2_000_000
func function_callable() -> void:
pass
func benchmark_function_callable() -> void:
for i in ITERATIONS:
function_callable.call()
func benchmark_lambda_inline_callable() -> void:
for i in ITERATIONS:
(func lambda_callable() -> void: pass).call()
func benchmark_lambda_variable_callable() -> void:
var variable_callable := \
func lambda_callable() -> void:
pass
for i in ITERATIONS:
variable_callable.call()

benchmarks/core/crypto.gd (new file, 33 lines)
View File

@@ -0,0 +1,33 @@
extends Benchmark
const ITERATIONS = 100_000
var crypto := Crypto.new()
func benchmark_generate_10_random_bytes() -> void:
for i in ITERATIONS:
crypto.generate_random_bytes(10)
func benchmark_generate_1k_random_bytes() -> void:
for i in ITERATIONS:
crypto.generate_random_bytes(1000)
func benchmark_generate_1m_random_bytes() -> void:
for i in ITERATIONS:
crypto.generate_random_bytes(1_000_000)
func benchmark_generate_1g_random_bytes() -> void:
for i in ITERATIONS:
crypto.generate_random_bytes(1_000_000_000)
func benchmark_generate_rsa_2048() -> void:
crypto.generate_rsa(2048)
func benchmark_generate_rsa_4096() -> void:
crypto.generate_rsa(4096)

View File

@@ -0,0 +1,92 @@
extends Benchmark
const ITERATIONS = 10_000_000
const RANDOM_SEED = preload("res://main.gd").RANDOM_SEED
var rng := RandomNumberGenerator.new()
func benchmark_global_scope_randi() -> void:
# Reset the random seed to improve reproducibility of this benchmark.
seed(RANDOM_SEED)
for i in ITERATIONS:
randi()
# Reset the random seed again to improve reproducibility of other benchmarks.
seed(RANDOM_SEED)
func benchmark_randi() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randi()
rng.seed = RANDOM_SEED
func benchmark_global_scope_randf() -> void:
seed(RANDOM_SEED)
for i in ITERATIONS:
randf()
seed(RANDOM_SEED)
func benchmark_randf() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randf()
rng.seed = RANDOM_SEED
func benchmark_global_scope_randi_range() -> void:
seed(RANDOM_SEED)
for i in ITERATIONS:
randi_range(1234, 5678)
seed(RANDOM_SEED)
func benchmark_randi_range() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randi_range(1234, 5678)
rng.seed = RANDOM_SEED
func benchmark_global_scope_randf_range() -> void:
seed(RANDOM_SEED)
for i in ITERATIONS:
randf_range(1234.0, 5678.0)
seed(RANDOM_SEED)
func benchmark_randf_range() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randf_range(1234.0, 5678.0)
rng.seed = RANDOM_SEED
func benchmark_global_scope_randfn() -> void:
seed(RANDOM_SEED)
for i in ITERATIONS:
randfn(10.0, 2.0)
seed(RANDOM_SEED)
func benchmark_randfn() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randfn(10.0, 2.0)
rng.seed = RANDOM_SEED
func benchmark_global_scope_randomize() -> void:
seed(RANDOM_SEED)
for i in ITERATIONS:
randomize()
seed(RANDOM_SEED)
func benchmark_randomize() -> void:
rng.seed = RANDOM_SEED
for i in ITERATIONS:
rng.randomize()
rng.seed = RANDOM_SEED

View File

@@ -1,12 +1,39 @@
extends Benchmark
signal emitter
signal emitter_params_1(arg1)
signal emitter_params_10(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10)
func on_emit():
const ITERATIONS = 1_000_000
func on_emit() -> void:
pass
func benchmark_emission():
func on_emit_params_1(_arg1) -> void:
pass
func on_emit_params_10(_arg1, _arg2, _arg3, _arg4, _arg5, _arg6, _arg7, _arg8, _arg9, _arg10) -> void:
pass
func benchmark_emission_params_0() -> void:
emitter.connect(on_emit)
for i in 1000_000:
for i in ITERATIONS:
emitter.emit()
emitter.disconnect(on_emit)
func benchmark_emission_params_1() -> void:
emitter_params_1.connect(on_emit_params_1)
for i in ITERATIONS:
emitter_params_1.emit(i)
emitter_params_1.disconnect(on_emit_params_1)
func benchmark_emission_params_10() -> void:
emitter_params_10.connect(on_emit_params_10)
for i in ITERATIONS:
emitter_params_10.emit(i, i, i, i, i, i, i, i, i, i)
emitter_params_10.disconnect(on_emit_params_10)

View File

@@ -3,7 +3,7 @@ extends Benchmark
const ITERATIONS = 100_000
func benchmark_deep_tree():
func benchmark_deep_tree() -> void:
var rt := Node.new()
for i in ITERATIONS:
var n := Node.new()
@@ -18,13 +18,13 @@ func benchmark_deep_tree():
rt = n
rt.free()
func benchmark_wide_tree():
func benchmark_wide_tree() -> void:
var rt := Node.new()
for i in ITERATIONS:
rt.add_child(Node.new())
rt.free()
func benchmark_fragmentation():
func benchmark_fragmentation() -> void:
var top := Node.new()
for i in 5:
top.add_child(Node.new())
@@ -45,7 +45,7 @@ func benchmark_fragmentation():
top.free()
func benchmark_duplicate():
func benchmark_duplicate() -> void:
var rt := Node.new()
for i in 16:
var n := Node.new()

View File

@@ -1,57 +1,220 @@
extends Benchmark
const ITERATIONS = 80_000
const ITERATIONS = 2_000_000
# Benchmark various array types by:
# 1) Inserting ITERATIONS elements from the back
# 2) Updating all ITERATIONS elements
# 3) Removing all ITERATIONS elements from the back
func benchmark_typed_int_array():
func benchmark_packed_int32_array() -> void:
var array := PackedInt32Array()
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_int64_array() -> void:
var array := PackedInt64Array()
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_typed_int_array() -> void:
var array: Array[int] = []
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0
for i in ITERATIONS:
array.remove_at(0)
array.remove_at(array.size() - 1)
func benchmark_untyped_int_array():
func benchmark_untyped_int_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0
for i in ITERATIONS:
array.remove_at(0)
array.remove_at(array.size() - 1)
func benchmark_packed_string_array():
func benchmark_packed_float32_array() -> void:
var array := PackedFloat32Array()
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0.0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_float64_array() -> void:
var array := PackedFloat64Array()
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0.0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_typed_float_array() -> void:
var array: Array[float] = []
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0.0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_untyped_float_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(i)
for i in ITERATIONS:
array[i] = 0.0
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_vector2_array() -> void:
var array := PackedVector2Array()
for i in ITERATIONS:
array.push_back(Vector2(i, i))
for i in ITERATIONS:
array[i] = Vector2.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_typed_vector2_array() -> void:
var array: Array[Vector2] = []
for i in ITERATIONS:
array.push_back(Vector2(i, i))
for i in ITERATIONS:
array[i] = Vector2.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_untyped_vector2_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(Vector2(i, i))
for i in ITERATIONS:
array[i] = Vector2.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_vector3_array() -> void:
var array := PackedVector3Array()
for i in ITERATIONS:
array.push_back(Vector3(i, i, i))
for i in ITERATIONS:
array[i] = Vector3.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_typed_vector3_array() -> void:
var array: Array[Vector3] = []
for i in ITERATIONS:
array.push_back(Vector3(i, i, i))
for i in ITERATIONS:
array[i] = Vector3.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_untyped_vector3_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(Vector3(i, i, i))
for i in ITERATIONS:
array[i] = Vector3.ZERO
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_color_array() -> void:
var array := PackedColorArray()
for i in ITERATIONS:
array.push_back(Color(i, i, i, 1.0))
for i in ITERATIONS:
array[i] = Color.BLACK
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_typed_color_array() -> void:
var array: Array[Color] = []
for i in ITERATIONS:
array.push_back(Color(i, i, i, 1.0))
for i in ITERATIONS:
array[i] = Color.BLACK
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_untyped_color_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(Color(i, i, i, 1.0))
for i in ITERATIONS:
array[i] = Color.BLACK
for i in ITERATIONS:
array.remove_at(array.size() - 1)
func benchmark_packed_string_array() -> void:
var array := PackedStringArray()
for i in ITERATIONS:
array.push_back(str("Godot ", i))
for i in ITERATIONS:
array[i] = ""
for i in ITERATIONS:
array.remove_at(0)
array.remove_at(array.size() - 1)
func benchmark_typed_string_array():
func benchmark_typed_string_array() -> void:
var array: Array[String] = []
for i in ITERATIONS:
array.push_back(str("Godot ", i))
for i in ITERATIONS:
array[i] = ""
for i in ITERATIONS:
array.remove_at(0)
array.remove_at(array.size() - 1)
func benchmark_untyped_string_array():
func benchmark_untyped_string_array() -> void:
var array = []
for i in ITERATIONS:
array.push_back(str("Godot ", i))
for i in ITERATIONS:
array[i] = ""
for i in ITERATIONS:
array.remove_at(0)
array.remove_at(array.size() - 1)
func benchmark_fill_loop() -> void:
var array = []
array.resize(10_000_000)
for i in array.size():
array[i] = 1234
func benchmark_fill_method() -> void:
var array = []
array.resize(10_000_000)
array.fill(1234)

View File

@@ -0,0 +1,65 @@
extends Benchmark
const ITERATIONS = 1_000_000
const LOREM_IPSUM = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."
# Benchmark computation of checksums on a string.
func benchmark_md5_buffer_empty() -> void:
for i in ITERATIONS:
"".md5_buffer()
func benchmark_md5_buffer_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.md5_buffer()
func benchmark_sha1_buffer_empty() -> void:
for i in ITERATIONS:
"".sha1_buffer()
func benchmark_sha1_buffer_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.sha1_buffer()
func benchmark_sha256_buffer_empty() -> void:
for i in ITERATIONS:
"".sha256_buffer()
func benchmark_sha256_buffer_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.sha256_buffer()
func benchmark_md5_text_empty() -> void:
for i in ITERATIONS:
"".md5_text()
func benchmark_md5_text_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.md5_text()
func benchmark_sha1_text_empty() -> void:
for i in ITERATIONS:
"".sha1_text()
func benchmark_sha1_text_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.sha1_text()
func benchmark_sha256_text_empty() -> void:
for i in ITERATIONS:
"".sha256_text()
func benchmark_sha256_text_non_empty() -> void:
for i in ITERATIONS:
LOREM_IPSUM.sha256_text()

View File

@@ -0,0 +1,71 @@
extends Benchmark
const ITERATIONS = 1_000_000
const ENGINE_NAME = "Godot"
const FORMAT_DICT = {engine = ENGINE_NAME}
var engine_name := "Godot"
var some_integer := 123456
var some_float := 1.2
var some_vector2i := Vector2i(12, 34)
# Benchmark various ways to format strings.
func benchmark_no_op_constant_method() -> void:
for i in ITERATIONS:
"Hello nothing!".format({})
func benchmark_simple_constant_concatenate() -> void:
for i in ITERATIONS:
"Hello " + ENGINE_NAME + "!"
func benchmark_simple_constant_percent() -> void:
for i in ITERATIONS:
"Hello %s!" % ENGINE_NAME
func benchmark_simple_constant_method() -> void:
for i in ITERATIONS:
"Hello {engine}!".format({engine = ENGINE_NAME})
func benchmark_simple_constant_method_constant_dict() -> void:
for i in ITERATIONS:
"Hello {engine}!".format(FORMAT_DICT)
func benchmark_simple_variable_concatenate() -> void:
for i in ITERATIONS:
"Hello " + engine_name + "!"
func benchmark_simple_variable_percent() -> void:
for i in ITERATIONS:
"Hello %s!" % engine_name
func benchmark_simple_variable_method() -> void:
for i in ITERATIONS:
"Hello {engine}!".format({engine = engine_name})
func benchmark_complex_variable_concatenate() -> void:
for i in ITERATIONS:
"Hello " + engine_name + "!\nA few examples of formatting: " + str(some_integer) + ", " + str(some_float).pad_decimals(2) + ", " + str(some_vector2i)
func benchmark_complex_variable_percent() -> void:
for i in ITERATIONS:
"Hello %s!\nA few examples of formatting: %d, %.2f, %v" % [engine_name, some_integer, some_float, some_vector2i]
func benchmark_complex_variable_method() -> void:
for i in ITERATIONS:
"Hello {engine}!\nA few examples of formatting: {an_integer}, {a_float}, {a_vector2}".format({
engine = engine_name,
an_integer = some_integer,
a_float = str(some_float).pad_decimals(2),
a_vector2i = some_vector2i,
})

View File

@@ -0,0 +1,234 @@
extends Benchmark
const ITERATIONS = 1_000_000
# Benchmark various ways to modify strings.
func benchmark_begins_with() -> void:
for i in ITERATIONS:
"Godot Engine".begins_with("Godot") # true
func benchmark_ends_with() -> void:
for i in ITERATIONS:
"Godot Engine".ends_with("Engine") # true
func benchmark_count() -> void:
for i in ITERATIONS:
"Godot Engine".count("o") # 2
func benchmark_countn() -> void:
for i in ITERATIONS:
"Godot Engine".countn("o") # 2
func benchmark_contains() -> void:
for i in ITERATIONS:
"Godot Engine".contains("o") # true
func benchmark_contains_gdscript_in() -> void:
for i in ITERATIONS:
"o" in "Godot Engine" # true
func benchmark_find() -> void:
for i in ITERATIONS:
"Godot Engine".find("o") # 1
func benchmark_findn() -> void:
for i in ITERATIONS:
"Godot Engine".findn("o") # 1
func benchmark_rfind() -> void:
for i in ITERATIONS:
"Godot Engine".rfind("o") # 3
func benchmark_rfindn() -> void:
for i in ITERATIONS:
"Godot Engine".rfindn("o") # 3
func benchmark_substr() -> void:
for i in ITERATIONS:
"Hello Godot!".substr(6, 5) # "Godot"
func benchmark_insert() -> void:
for i in ITERATIONS:
"Hello !".insert(6, "Godot") # "Hello Godot!"
func benchmark_get_slice() -> void:
for i in ITERATIONS:
"1234,5678,90.12".get_slice(",", 1) # "5678"
func benchmark_get_slice_count() -> void:
for i in ITERATIONS:
"1234,5678,90.12".get_slice_count(",") # 3
func benchmark_bigrams() -> void:
for i in ITERATIONS:
"Godot Engine".bigrams() # ["Go", "od", "do", "ot", "t ", " E", "En", "ng", "gi", "in", "ne"]
func benchmark_split() -> void:
for i in ITERATIONS:
"1234,5678,90.12".split(",") # ["1234", "5678", "90.12"]
func benchmark_rsplit() -> void:
for i in ITERATIONS:
"1234,5678,90.12".rsplit(",") # ["1234", "5678", "90.12"]
func benchmark_split_floats() -> void:
for i in ITERATIONS:
"1234,5678,90.12".split_floats(",") # [1234.0, 5678.0, 90.12]
func benchmark_pad_zeros_pre_constructed() -> void:
for i in ITERATIONS:
"12345".pad_zeros(7) # "0012345"
func benchmark_pad_zeros() -> void:
for i in ITERATIONS:
str(12345).pad_zeros(7) # "0012345"
func benchmark_pad_decimals_pre_constructed() -> void:
for i in ITERATIONS:
"1234.5678".pad_decimals(2) # "1234.56"
func benchmark_pad_decimals() -> void:
for i in ITERATIONS:
str(1234.5678).pad_decimals(2) # "1234.56"
func benchmark_lpad() -> void:
for i in ITERATIONS:
"Godot".lpad(7, "+") # "++Godot"
func benchmark_rpad() -> void:
for i in ITERATIONS:
"Godot".rpad(7, "+") # "Godot++"
func benchmark_similarity() -> void:
for i in ITERATIONS:
"Godot".similarity("Engine")
func benchmark_simplify_path() -> void:
for i in ITERATIONS:
"./path/to///../file".simplify_path() # "path/file"
func benchmark_capitalize() -> void:
for i in ITERATIONS:
"godot_engine_demo".capitalize() # "Godot Engine Demo"
func benchmark_to_snake_case() -> void:
for i in ITERATIONS:
"GodotEngineDemo".to_snake_case() # "godot_engine_demo"
func benchmark_to_camel_case() -> void:
for i in ITERATIONS:
"godot_engine_demo".to_snake_case() # "godotEngineDemo"
func benchmark_to_pascal_case() -> void:
for i in ITERATIONS:
"godot_engine_demo".to_pascal_case() # "GodotEngineDemo"
func benchmark_to_lower() -> void:
for i in ITERATIONS:
"Godot Engine Demo".to_lower() # "godot engine demo"
func benchmark_uri_decode() -> void:
for i in ITERATIONS:
"Godot%20Engine%3Adocs".uri_decode() # "Godot Engine:docs"
func benchmark_uri_encode() -> void:
for i in ITERATIONS:
"Godot Engine:docs".uri_encode() # "Godot%20Engine%3Adocs"
func benchmark_xml_escape() -> void:
for i in ITERATIONS:
"Godot Engine <&>".xml_escape() # "Godot Engine &lt;&amp;&gt;"
func benchmark_xml_unescape() -> void:
for i in ITERATIONS:
"Godot Engine &lt;&amp;&gt;".xml_unescape() # "Godot Engine <&>"
func benchmark_humanize_size() -> void:
for i in ITERATIONS:
String.humanize_size(123456) # "120.5 KiB"
func benchmark_is_valid_filename() -> void:
for i in ITERATIONS:
"Godot Engine: Demo.exe".is_valid_filename() # false
func benchmark_validate_filename() -> void:
for i in ITERATIONS:
"Godot Engine: Demo.exe".validate_filename() # "Godot Engine_ Demo.exe"
func benchmark_validate_node_name() -> void:
for i in ITERATIONS:
"TestNode:123456".validate_node_name() # "TestNode123456"
func benchmark_casecmp_to() -> void:
for i in ITERATIONS:
"2 Example".casecmp_to("10 Example") # 1
func benchmark_nocasecmp_to() -> void:
for i in ITERATIONS:
"2 Example".nocasecmp_to("10 Example") # 1
func benchmark_naturalnocasecmp_to() -> void:
for i in ITERATIONS:
"2 Example".naturalnocasecmp_to("10 Example") # -1
func benchmark_to_utf8_buffer() -> void:
for i in ITERATIONS:
"Godot Engine".to_utf8_buffer()
func benchmark_to_utf16_buffer() -> void:
for i in ITERATIONS:
"Godot Engine".to_utf16_buffer()
func benchmark_to_utf32_buffer() -> void:
for i in ITERATIONS:
"Godot Engine".to_utf32_buffer()
func benchmark_to_wchar_buffer() -> void:
for i in ITERATIONS:
"Godot Engine".to_wchar_buffer()

View File

@@ -0,0 +1,69 @@
extends Benchmark
var box_shape := BoxShape3D.new()
var sphere_shape := SphereShape3D.new()
func _init() -> void:
test_physics = true
test_idle = true
func setup_scene(create_body_func: Callable, unique_shape: bool, num_shapes: int) -> Node3D:
var scene_root := Node3D.new()
var camera := Camera3D.new()
camera.position.y = 0.3
camera.position.z = 1.0
camera.rotate_x(-0.8)
scene_root.add_child(camera)
for _i in num_shapes:
var box: RigidBody3D = create_body_func.call(unique_shape)
box.position.x = randf_range(-50, 50)
box.position.z = randf_range(-50, 50)
scene_root.add_child(box)
return scene_root
func create_box(unique_shape: bool) -> RigidBody3D:
var rigid_body := RigidBody3D.new()
var collision_shape := CollisionShape3D.new()
if unique_shape:
collision_shape.shape = BoxShape3D.new()
else:
# Reuse existing shape.
collision_shape.shape = box_shape
rigid_body.add_child(collision_shape)
return rigid_body
func create_sphere(unique_shape: bool) -> RigidBody3D:
var rigid_body := RigidBody3D.new()
var collision_shape := CollisionShape3D.new()
if unique_shape:
collision_shape.shape = SphereShape3D.new()
else:
# Reuse existing shape.
collision_shape.shape = sphere_shape
rigid_body.add_child(collision_shape)
return rigid_body
func benchmark_7500_rigid_body_3d_shared_box_shape() -> Node3D:
return setup_scene(create_box, false, 7500)
func benchmark_7500_rigid_body_3d_unique_box_shape() -> Node3D:
return setup_scene(create_box, true, 7500)
func benchmark_7500_rigid_body_3d_shared_sphere_shape() -> Node3D:
return setup_scene(create_sphere, false, 7500)
func benchmark_7500_rigid_body_3d_unique_sphere_shape() -> Node3D:
return setup_scene(create_sphere, true, 7500)

View File

@@ -134,7 +134,6 @@ func benchmark_cull_fast() -> Node3D:
# Smoke test
func benchmark_aaa_setup():
for i in 20:
print("Iteration ", i)
var tmp := Node3D.new()
attach_model_recursive(tmp, NUMBER_OF_OBJECTS)
tmp.free()

View File

@@ -0,0 +1,37 @@
extends Benchmark
const LOREM_IPSUM = "Ullam dolorum consequuntur minima neque soluta ab. Est dolores officiis reiciendis omnis eum inventore. Laboriosam quis magni asperiores officia fuga animi qui. Quisquam repudiandae non quisquam vero aut id. Voluptatem occaecati fugiat officia distinctio omnis nesciunt occaecati."
const FONT_SIZE = 9
# Defined to make the label span the entire viewport.
var label_size := Vector2(ProjectSettings.get_setting("display/window/size/viewport_width"), ProjectSettings.get_setting("display/window/size/viewport_height"))
func _init() -> void:
test_render_cpu = true
test_render_gpu = true
func benchmark_label() -> Label:
var label := Label.new()
label.add_theme_font_size_override("font_size", FONT_SIZE)
label.size = label_size
label.text = LOREM_IPSUM.repeat(100)
return label
func benchmark_label_autowrap_arbitrary() -> Label:
var label := benchmark_label()
label.autowrap_mode = TextServer.AUTOWRAP_ARBITRARY
return label
func benchmark_label_autowrap_word() -> Label:
var label := benchmark_label()
label.autowrap_mode = TextServer.AUTOWRAP_WORD
return label
func benchmark_label_autowrap_smart() -> Label:
var label := benchmark_label()
label.autowrap_mode = TextServer.AUTOWRAP_WORD_SMART
return label

View File

@@ -1,5 +1,7 @@
extends Panel
const RANDOM_SEED = 0x60d07
var items := []
# Prefix variables with `arg_` to have them automatically be parsed from command line arguments
@@ -10,10 +12,9 @@ var arg_run_benchmarks := false
@onready var tree := $Tree as Tree
func _ready() -> void:
# Use a fixed random seed to improve reproducibility of results.
seed(0x60d07)
seed(RANDOM_SEED)
# Parse valid command-line arguments of the form `--key=value` into member variables.
for argument in OS.get_cmdline_user_args():
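	# (Editorial, illustrative sketch only: the rest of this loop body is not shown
	# in this hunk. One plausible way to map `--key=value` pairs onto the
	# `arg_`-prefixed members could look like the commented-out lines below;
	# the names and exact parsing are assumptions, not the commit's verified code.)
	#
	#     var parts := argument.lstrip("-").split("=")
	#     if parts.size() == 2 and ("arg_" + parts[0]) in self:
	#         set("arg_" + parts[0], str_to_var(parts[1]))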
@@ -111,8 +112,7 @@ func _on_SelectNone_pressed() -> void:
func _on_CopyJSON_pressed() -> void:
var json := JSON.new()
DisplayServer.clipboard_set(json.stringify(Manager.get_results_dict(), "\t"))
DisplayServer.clipboard_set(JSON.stringify(Manager.get_results_dict(), "\t"))
func _on_Run_pressed() -> void:

View File

@@ -143,16 +143,19 @@ func run_test(test_id: TestID) -> void:
results.render_cpu += RenderingServer.viewport_get_measured_render_time_cpu(get_tree().root.get_viewport_rid()) + RenderingServer.get_frame_setup_time_cpu()
results.render_gpu += RenderingServer.viewport_get_measured_render_time_gpu(get_tree().root.get_viewport_rid())
results.idle += 0.0
results.physics += 0.0
# Godot updates idle and physics performance monitors only once per second,
# with the value representing the average time spent in idle/physics processing over the last second.
# The value is in seconds, not milliseconds.
# Keep the highest reported value throughout the run.
results.idle = maxf(results.idle, Performance.get_monitor(Performance.TIME_PROCESS) * 1000)
results.physics = maxf(results.physics, Performance.get_monitor(Performance.TIME_PHYSICS_PROCESS) * 1000)
frames_captured += 1
results.render_cpu /= float(max(1.0, float(frames_captured)))
results.render_gpu /= float(max(1.0, float(frames_captured)))
results.idle /= float(max(1.0, float(frames_captured)))
results.physics /= float(max(1.0, float(frames_captured)))
# Don't divide `results.idle` and `results.physics` since these are already
# metrics calculated on a per-second basis.
for metric in results.get_property_list():
if benchmark_script.get("test_" + metric.name) == false: # account for null

View File

@@ -8,27 +8,21 @@
config_version=5
_global_script_classes=[{
"base": "Label",
"class": &"Benchmark",
"language": &"GDScript",
"path": "res://benchmark.gd"
}]
_global_script_class_icons={
"Benchmark": ""
}
[application]
config/name="Godot Benchmarks"
run/main_scene="res://main.tscn"
config/features=PackedStringArray("4.0")
config/features=PackedStringArray("4.1")
config/icon="res://icon.png"
[autoload]
Manager="*res://manager.gd"
[debug]
gdscript/warnings/standalone_expression=0
[display]
window/size/viewport_width=1920