commit fab3412139
Merge pull request #50899 from akien-mga/refref

Use Ref<T> references as iterators where relevant

18 changed files with 62 additions and 83 deletions
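For context, the change replaces by-value Ref<T> loop variables with plain or const references, so each range-for iteration no longer copies the handle and bumps the reference count up and down. Below is a minimal sketch of the idea, using std::shared_ptr and std::list as stand-ins for Godot's Ref<T> and List<T>; the Resource struct, ResourceRef alias, and printed counts are illustrative only, not the engine's API.

// Sketch only: std::shared_ptr stands in for Godot's reference-counted Ref<T>.
#include <cstdio>
#include <list>
#include <memory>

struct Resource {
    int id = 0;
};

using ResourceRef = std::shared_ptr<Resource>; // hypothetical stand-in for Ref<Resource>

int main() {
    std::list<ResourceRef> cached;
    for (int i = 0; i < 3; i++) {
        cached.push_back(std::make_shared<Resource>(Resource{i}));
    }

    // Iterating by value copies the handle on every iteration, incrementing and
    // decrementing the shared count each time; this is the pattern the PR removes.
    for (ResourceRef r : cached) {
        std::printf("by value:     id=%d use_count=%ld\n", r->id, r.use_count());
    }

    // Iterating by (const) reference reads through the same handle with no copy
    // and no reference-count traffic; this is the pattern the PR adopts.
    for (const ResourceRef &r : cached) {
        std::printf("by reference: id=%d use_count=%ld\n", r->id, r.use_count());
    }
    return 0;
}

The diff below uses both the plain-reference and const-reference forms depending on the loop body; in either case the per-iteration copy of the handle is gone.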
@@ -355,20 +355,20 @@ void _OS::print_all_textures_by_size() {
     List<Ref<Resource>> rsrc;
     ResourceCache::get_cached_resources(&rsrc);

-    for (Ref<Resource> E : rsrc) {
-        if (!E->is_class("ImageTexture")) {
+    for (Ref<Resource> &res : rsrc) {
+        if (!res->is_class("ImageTexture")) {
             continue;
         }

-        Size2 size = E->call("get_size");
-        int fmt = E->call("get_format");
+        Size2 size = res->call("get_size");
+        int fmt = res->call("get_format");

         _OSCoreBindImg img;
         img.size = size;
         img.fmt = fmt;
-        img.path = E->get_path();
+        img.path = res->get_path();
         img.vram = Image::get_image_data_size(img.size.width, img.size.height, Image::Format(img.fmt));
-        img.id = E->get_instance_id();
+        img.id = res->get_instance_id();
         total += img.vram;
         imgs.push_back(img);
     }
@@ -387,7 +387,7 @@ void _OS::print_resources_by_type(const Vector<String> &p_types) {
     List<Ref<Resource>> resources;
     ResourceCache::get_cached_resources(&resources);

-    for (Ref<Resource> r : resources) {
+    for (const Ref<Resource> &r : resources) {
         bool found = false;

         for (int i = 0; i < p_types.size(); i++) {
@@ -1648,7 +1648,7 @@ void EditorInspector::update_tree() {

     Color sscolor = get_theme_color(SNAME("prop_subsection"), SNAME("Editor"));

-    for (Ref<EditorInspectorPlugin> ped : valid_plugins) {
+    for (Ref<EditorInspectorPlugin> &ped : valid_plugins) {
         ped->parse_begin(object);
         _parse_added_editors(main_vbox, ped);
     }
@@ -1745,7 +1745,7 @@ void EditorInspector::update_tree() {
             category->set_tooltip(p.name + "::" + (class_descr_cache[type2] == "" ? "" : class_descr_cache[type2]));
         }

-        for (Ref<EditorInspectorPlugin> ped : valid_plugins) {
+        for (Ref<EditorInspectorPlugin> &ped : valid_plugins) {
             ped->parse_category(object, p.name);
             _parse_added_editors(main_vbox, ped);
         }
@@ -1946,7 +1946,7 @@ void EditorInspector::update_tree() {
            doc_hint = descr;
         }

-        for (Ref<EditorInspectorPlugin> ped : valid_plugins) {
+        for (Ref<EditorInspectorPlugin> &ped : valid_plugins) {
             bool exclusive = ped->parse_property(object, p.type, p.name, p.hint, p.hint_string, p.usage, wide_editors);

             List<EditorInspectorPlugin::AddedEditor> editors = ped->added_editors; //make a copy, since plugins may be used again in a sub-inspector
@@ -2028,7 +2028,7 @@ void EditorInspector::update_tree() {
            }
         }

-        for (Ref<EditorInspectorPlugin> ped : valid_plugins) {
+        for (Ref<EditorInspectorPlugin> &ped : valid_plugins) {
             ped->parse_end();
             _parse_added_editors(main_vbox, ped);
         }
@@ -2595,7 +2595,7 @@ void EditorInspector::_update_script_class_properties(const Object &p_object, Li
     }

     Set<StringName> added;
-    for (Ref<Script> s : classes) {
+    for (const Ref<Script> &s : classes) {
         String path = s->get_path();
         String name = EditorNode::get_editor_data().script_class_get_name(path);
         if (name.is_empty()) {
@@ -797,8 +797,8 @@ void EditorNode::_resources_changed(const Vector<String> &p_resources) {
     }

     if (changed.size()) {
-        for (Ref<Resource> E : changed) {
-            E->reload_from_file();
+        for (Ref<Resource> &res : changed) {
+            res->reload_from_file();
         }
     }
 }
@@ -1551,7 +1551,7 @@ int EditorNode::_save_external_resources() {
     int saved = 0;
     List<Ref<Resource>> cached;
     ResourceCache::get_cached_resources(&cached);
-    for (Ref<Resource> res : cached) {
+    for (const Ref<Resource> &res : cached) {
         if (!res->get_path().is_resource_file()) {
             continue;
         }
@@ -1641,7 +1641,7 @@ void EditorNode::_save_scene(String p_file, int idx) {

     editor_data.save_editor_external_data();

-    for (Ref<AnimatedValuesBackup> E : anim_backups) {
+    for (Ref<AnimatedValuesBackup> &E : anim_backups) {
         E->restore();
     }

@@ -5333,27 +5333,6 @@ void EditorNode::_file_access_close_error_notify(const String &p_str) {
 }

 void EditorNode::reload_scene(const String &p_path) {
-    /*
-     * No longer necessary since scenes now reset and reload their internal resource if needed.
-    //first of all, reload internal textures, materials, meshes, etc. as they might have changed on disk
-
-    List<Ref<Resource>> cached;
-    ResourceCache::get_cached_resources(&cached);
-    List<Ref<Resource>> to_clear; //clear internal resources from previous scene from being used
-    for (Ref<Resource> E : cached) {
-        if (E->get_path().begins_with(p_path + "::")) { //subresources of existing scene
-            to_clear.push_back(E);
-        }
-    }
-
-    //so reload reloads everything, clear subresources of previous scene
-    while (to_clear.front()) {
-        to_clear.front()->get()->set_path("");
-        to_clear.pop_front();
-    }
-
-    */
-
     int scene_idx = -1;
     for (int i = 0; i < editor_data.get_edited_scene_count(); i++) {
         if (editor_data.get_scene_path(i) == p_path) {
@@ -1580,7 +1580,7 @@ void EditorSettings::set_builtin_action_override(const String &p_name, const Arr
     int event_idx = 0;

     // Check equality of each event.
-    for (Ref<InputEvent> E : builtin_events) {
+    for (const Ref<InputEvent> &E : builtin_events) {
         if (!E->is_match(p_events[event_idx])) {
             same_as_builtin = false;
             break;
@@ -1610,7 +1610,7 @@ const Array EditorSettings::get_builtin_action_overrides(const String &p_name) c
     Array event_array;

     List<Ref<InputEvent>> events_list = AO->get();
-    for (Ref<InputEvent> E : events_list) {
+    for (const Ref<InputEvent> &E : events_list) {
         event_array.push_back(E);
     }
     return event_array;
@@ -1237,7 +1237,7 @@ void FileSystemDock::_update_resource_paths_after_move(const Map<String, String>
     List<Ref<Resource>> cached;
     ResourceCache::get_cached_resources(&cached);

-    for (Ref<Resource> r : cached) {
+    for (Ref<Resource> &r : cached) {
         String base_path = r->get_path();
         String extra_path;
         int sep_pos = r->get_path().find("::");
@@ -438,7 +438,7 @@ Node *EditorOBJImporter::import_scene(const String &p_path, uint32_t p_flags, in

     Node3D *scene = memnew(Node3D);

-    for (Ref<Mesh> m : meshes) {
+    for (const Ref<Mesh> &m : meshes) {
         Ref<EditorSceneImporterMesh> mesh;
         mesh.instantiate();
         for (int i = 0; i < m->get_surface_count(); i++) {
@@ -106,7 +106,7 @@ void ImportDefaultsEditor::_update_importer() {
     List<Ref<ResourceImporter>> importer_list;
     ResourceFormatImporter::get_singleton()->get_importers(&importer_list);
     Ref<ResourceImporter> importer;
-    for (Ref<ResourceImporter> E : importer_list) {
+    for (const Ref<ResourceImporter> &E : importer_list) {
         if (E->get_visible_name() == importers->get_item_text(importers->get_selected())) {
             importer = E;
             break;
@@ -166,7 +166,7 @@ void ImportDefaultsEditor::clear() {
     List<Ref<ResourceImporter>> importer_list;
     ResourceFormatImporter::get_singleton()->get_importers(&importer_list);
     Vector<String> names;
-    for (Ref<ResourceImporter> E : importer_list) {
+    for (const Ref<ResourceImporter> &E : importer_list) {
         String vn = E->get_visible_name();
         names.push_back(vn);
     }
@@ -111,7 +111,7 @@ void ImportDock::set_edit_path(const String &p_path) {
     ResourceFormatImporter::get_singleton()->get_importers_for_extension(p_path.get_extension(), &importers);
     List<Pair<String, String>> importer_names;

-    for (Ref<ResourceImporter> E : importers) {
+    for (const Ref<ResourceImporter> &E : importers) {
         importer_names.push_back(Pair<String, String>(E->get_visible_name(), E->get_importer_name()));
     }

@@ -254,7 +254,7 @@ void ImportDock::set_edit_multiple_paths(const Vector<String> &p_paths) {
     ResourceFormatImporter::get_singleton()->get_importers_for_extension(p_paths[0].get_extension(), &importers);
     List<Pair<String, String>> importer_names;

-    for (Ref<ResourceImporter> E : importers) {
+    for (const Ref<ResourceImporter> &E : importers) {
         importer_names.push_back(Pair<String, String>(E->get_visible_name(), E->get_importer_name()));
     }

@@ -365,7 +365,7 @@ void SpriteFramesEditor::_file_load_request(const Vector<String> &p_path, int p_

     int count = 0;

-    for (Ref<Texture2D> &E : resources) {
+    for (const Ref<Texture2D> &E : resources) {
         undo_redo->add_do_method(frames, "add_frame", edited_anim, E, p_at_pos == -1 ? -1 : p_at_pos + count);
         undo_redo->add_undo_method(frames, "remove_frame", edited_anim, p_at_pos == -1 ? fc : p_at_pos);
         count++;
@@ -2906,7 +2906,7 @@ void ThemeTypeEditor::_update_stylebox_from_leading() {
             continue;
         }

-        for (Ref<StyleBox> F : styleboxes) {
+        for (const Ref<StyleBox> &F : styleboxes) {
             Ref<StyleBox> sb = F;
             sb->set(E.name, value);
         }
@@ -403,7 +403,7 @@ void EditorSettingsDialog::_shortcut_button_pressed(Object *p_item, int p_column
         List<Ref<InputEvent>> defaults = InputMap::get_singleton()->get_builtins()[current_action];

         // Convert the list to an array, and only keep key events as this is for the editor.
-        for (Ref<InputEvent> k : defaults) {
+        for (const Ref<InputEvent> &k : defaults) {
             if (k.is_valid()) {
                 events.append(k);
             }
@@ -1804,10 +1804,10 @@ void GDScriptLanguage::reload_all_scripts() {

     scripts.sort_custom<GDScriptDepSort>(); //update in inheritance dependency order

-    for (Ref<GDScript> E : scripts) {
-        print_verbose("GDScript: Reloading: " + E->get_path());
-        E->load_source_code(E->get_path());
-        E->reload(true);
+    for (Ref<GDScript> &script : scripts) {
+        print_verbose("GDScript: Reloading: " + script->get_path());
+        script->load_source_code(script->get_path());
+        script->reload(true);
     }
 #endif
 }
@@ -1836,21 +1836,21 @@ void GDScriptLanguage::reload_tool_script(const Ref<Script> &p_script, bool p_so

     scripts.sort_custom<GDScriptDepSort>(); //update in inheritance dependency order

-    for (Ref<GDScript> E : scripts) {
-        bool reload = E == p_script || to_reload.has(E->get_base());
+    for (Ref<GDScript> &script : scripts) {
+        bool reload = script == p_script || to_reload.has(script->get_base());

         if (!reload) {
             continue;
         }

-        to_reload.insert(E, Map<ObjectID, List<Pair<StringName, Variant>>>());
+        to_reload.insert(script, Map<ObjectID, List<Pair<StringName, Variant>>>());

         if (!p_soft_reload) {
             //save state and remove script from instances
-            Map<ObjectID, List<Pair<StringName, Variant>>> &map = to_reload[E];
+            Map<ObjectID, List<Pair<StringName, Variant>>> &map = to_reload[script];

-            while (E->instances.front()) {
-                Object *obj = E->instances.front()->get();
+            while (script->instances.front()) {
+                Object *obj = script->instances.front()->get();
                 //save instance info
                 List<Pair<StringName, Variant>> state;
                 if (obj->get_script_instance()) {
@@ -1863,8 +1863,8 @@ void GDScriptLanguage::reload_tool_script(const Ref<Script> &p_script, bool p_so
         //same thing for placeholders
 #ifdef TOOLS_ENABLED

-        while (E->placeholders.size()) {
-            Object *obj = E->placeholders.front()->get()->get_owner();
+        while (script->placeholders.size()) {
+            Object *obj = script->placeholders.front()->get()->get_owner();

             //save instance info
             if (obj->get_script_instance()) {
@@ -1874,13 +1874,13 @@ void GDScriptLanguage::reload_tool_script(const Ref<Script> &p_script, bool p_so
                 obj->set_script(Variant());
             } else {
                 // no instance found. Let's remove it so we don't loop forever
-                E->placeholders.erase(E->placeholders.front()->get());
+                script->placeholders.erase(script->placeholders.front()->get());
             }
         }

 #endif

-        for (Map<ObjectID, List<Pair<StringName, Variant>>>::Element *F = E->pending_reload_state.front(); F; F = F->next()) {
+        for (Map<ObjectID, List<Pair<StringName, Variant>>>::Element *F = script->pending_reload_state.front(); F; F = F->next()) {
             map[F->key()] = F->get(); //pending to reload, use this one instead
         }
     }
@@ -874,7 +874,7 @@ void CSharpLanguage::reload_assemblies(bool p_soft_reload) {

     // As scripts are going to be reloaded, must proceed without locking here

-    for (Ref<CSharpScript> script : scripts) {
+    for (Ref<CSharpScript> &script : scripts) {
         to_reload.push_back(script);

         if (script->get_path().is_empty()) {
@@ -934,7 +934,7 @@ void CSharpLanguage::reload_assemblies(bool p_soft_reload) {
     }

     // After the state of all instances is saved, clear scripts and script instances
-    for (Ref<CSharpScript> script : scripts) {
+    for (Ref<CSharpScript> &script : scripts) {
         while (script->instances.front()) {
             Object *obj = script->instances.front()->get();
             obj->set_script(REF()); // Remove script and existing script instances (placeholder are not removed before domain reload)
@@ -947,7 +947,7 @@ void CSharpLanguage::reload_assemblies(bool p_soft_reload) {
     if (gdmono->reload_scripts_domain() != OK) {
         // Failed to reload the scripts domain
         // Make sure to add the scripts back to their owners before returning
-        for (Ref<CSharpScript> scr : to_reload) {
+        for (Ref<CSharpScript> &scr : to_reload) {
             for (const Map<ObjectID, CSharpScript::StateBackup>::Element *F = scr->pending_reload_state.front(); F; F = F->next()) {
                 Object *obj = ObjectDB::get_instance(F->key());

@@ -982,7 +982,7 @@ void CSharpLanguage::reload_assemblies(bool p_soft_reload) {

     List<Ref<CSharpScript>> to_reload_state;

-    for (Ref<CSharpScript> script : to_reload) {
+    for (Ref<CSharpScript> &script : to_reload) {
 #ifdef TOOLS_ENABLED
         script->exports_invalidated = true;
 #endif
@@ -1087,7 +1087,7 @@ void CSharpLanguage::reload_assemblies(bool p_soft_reload) {
         to_reload_state.push_back(script);
     }

-    for (Ref<CSharpScript> script : to_reload_state) {
+    for (Ref<CSharpScript> &script : to_reload_state) {
         for (Set<ObjectID>::Element *F = script->pending_reload_instances.front(); F; F = F->next()) {
             ObjectID obj_id = F->get();
             Object *obj = ObjectDB::get_instance(obj_id);
@@ -154,7 +154,7 @@ void WebRTCMultiplayerPeer::_find_next_peer() {
     }
     // After last.
     while (E) {
-        for (Ref<WebRTCDataChannel> F : E->get()->channels) {
+        for (const Ref<WebRTCDataChannel> &F : E->get()->channels) {
             if (F->get_available_packet_count()) {
                 next_packet_peer = E->key();
                 return;
@@ -165,7 +165,7 @@ void WebRTCMultiplayerPeer::_find_next_peer() {
     E = peer_map.front();
     // Before last
     while (E) {
-        for (Ref<WebRTCDataChannel> F : E->get()->channels) {
+        for (const Ref<WebRTCDataChannel> &F : E->get()->channels) {
             if (F->get_available_packet_count()) {
                 next_packet_peer = E->key();
                 return;
@@ -213,7 +213,7 @@ int WebRTCMultiplayerPeer::get_unique_id() const {

 void WebRTCMultiplayerPeer::_peer_to_dict(Ref<ConnectedPeer> p_connected_peer, Dictionary &r_dict) {
     Array channels;
-    for (Ref<WebRTCDataChannel> F : p_connected_peer->channels) {
+    for (Ref<WebRTCDataChannel> &F : p_connected_peer->channels) {
         channels.push_back(F);
     }
     r_dict["connection"] = p_connected_peer->connection;
@@ -297,7 +297,7 @@ Error WebRTCMultiplayerPeer::get_packet(const uint8_t **r_buffer, int &r_buffer_
         _find_next_peer();
         ERR_FAIL_V(ERR_UNAVAILABLE);
     }
-    for (Ref<WebRTCDataChannel> E : peer_map[next_packet_peer]->channels) {
+    for (Ref<WebRTCDataChannel> &E : peer_map[next_packet_peer]->channels) {
         if (E->get_available_packet_count()) {
             Error err = E->get_packet(r_buffer, r_buffer_size);
             _find_next_peer();
@@ -357,7 +357,7 @@ int WebRTCMultiplayerPeer::get_available_packet_count() const {
     }
     int size = 0;
     for (Map<int, Ref<ConnectedPeer>>::Element *E = peer_map.front(); E; E = E->next()) {
-        for (Ref<WebRTCDataChannel> F : E->get()->channels) {
+        for (const Ref<WebRTCDataChannel> &F : E->get()->channels) {
             size += F->get_available_packet_count();
         }
     }
@@ -196,7 +196,7 @@ void WSLServer::poll() {
     remove_ids.clear();

     List<Ref<PendingPeer>> remove_peers;
-    for (Ref<PendingPeer> E : _pending) {
+    for (const Ref<PendingPeer> &E : _pending) {
         String resource_name;
         Ref<PendingPeer> ppeer = E;
         Error err = ppeer->do_handshake(_protocols, handshake_timeout, resource_name);
@@ -224,7 +224,7 @@ void WSLServer::poll() {
         remove_peers.push_back(ppeer);
         _on_connect(id, ppeer->protocol, resource_name);
     }
-    for (Ref<PendingPeer> E : remove_peers) {
+    for (const Ref<PendingPeer> &E : remove_peers) {
         _pending.erase(E);
     }
     remove_peers.clear();
@@ -46,8 +46,8 @@ void Tween::start_tweeners() {
         ERR_FAIL_MSG("Tween without commands, aborting.");
     }

-    for (Ref<Tweener> E : tweeners.write[current_step]) {
-        E->start();
+    for (Ref<Tweener> &tweener : tweeners.write[current_step]) {
+        tweener->start();
     }
 }

@@ -253,11 +253,11 @@ bool Tween::step(float p_delta) {
         float step_delta = rem_delta;
         step_active = false;

-        for (Ref<Tweener> E : tweeners.write[current_step]) {
+        for (Ref<Tweener> &tweener : tweeners.write[current_step]) {
             // Modified inside Tweener.step().
             float temp_delta = rem_delta;
             // Turns to true if any Tweener returns true (i.e. is still not finished).
-            step_active = E->step(temp_delta) || step_active;
+            step_active = tweener->step(temp_delta) || step_active;
             step_delta = MIN(temp_delta, rem_delta);
         }

@@ -571,8 +571,8 @@ void SceneTree::finalize() {
     }

     // cleanup timers
-    for (Ref<SceneTreeTimer> E : timers) {
-        E->release_connections();
+    for (Ref<SceneTreeTimer> &timer : timers) {
+        timer->release_connections();
     }
     timers.clear();
 }
@@ -1146,8 +1146,8 @@ Array SceneTree::get_processed_tweens() {
     ret.resize(tweens.size());

     int i = 0;
-    for (Ref<Tween> E : tweens) {
-        ret[i] = E;
+    for (const Ref<Tween> &tween : tweens) {
+        ret[i] = tween;
         i++;
     }

@@ -573,7 +573,7 @@ void Viewport::_process_picking() {
     // if no mouse event exists, create a motion one. This is necessary because objects or camera may have moved.
     // while this extra event is sent, it is checked if both camera and last object and last ID did not move. If nothing changed, the event is discarded to avoid flooding with unnecessary motion events every frame
     bool has_mouse_event = false;
-    for (Ref<InputEvent> m : physics_picking_events) {
+    for (const Ref<InputEvent> &m : physics_picking_events) {
         if (m.is_valid()) {
             has_mouse_event = true;
             break;