Merge pull request #104432 from YYF233333/dict_iter2
Do not iterate `Dictionary` with `Dictionary::keys()`
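
For context: `Dictionary::keys()` materializes a temporary `Array` of all keys, and every subsequent `dict[key]` access pays another hash lookup. Iterating the dictionary's `KeyValue<Variant, Variant>` pairs directly avoids both the allocation and the re-lookups. A minimal before/after sketch of the pattern applied throughout this commit (the `dump_*` helpers are illustrative, not code from the diff):

```cpp
#include "core/string/print_string.h"
#include "core/variant/dictionary.h"

// Before: builds a keys Array, then does one extra hash lookup per key.
static void dump_before(const Dictionary &d) {
    Array keys = d.keys();
    for (int i = 0; i < keys.size(); i++) {
        print_line(vformat("%s = %s", keys[i], d[keys[i]]));
    }
}

// After: walks the underlying storage directly; no temporary Array, no re-lookups.
static void dump_after(const Dictionary &d) {
    for (const KeyValue<Variant, Variant> &kv : d) {
        print_line(vformat("%s = %s", kv.key, kv.value));
    }
}
```

The one caveat, which the LocalizationEditor hunks below illustrate, is that a `keys()` snapshot remains the safe choice when the loop body mutates the dictionary.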
@@ -1343,16 +1343,16 @@ static bool compare_value(const String &p_path, const String &p_field, const Var
     } else if (p_old_value.get_type() == Variant::DICTIONARY && p_new_value.get_type() == Variant::DICTIONARY) {
         Dictionary old_dict = p_old_value;
         Dictionary new_dict = p_new_value;
-        for (const Variant &key : old_dict.keys()) {
-            if (!new_dict.has(key)) {
+        for (const KeyValue<Variant, Variant> &kv : old_dict) {
+            if (!new_dict.has(kv.key)) {
                 failed = true;
-                print_error(vformat("Validate extension JSON: Error: Field '%s': %s was removed.", p_path, key));
+                print_error(vformat("Validate extension JSON: Error: Field '%s': %s was removed.", p_path, kv.key));
                 continue;
             }
-            if (p_allow_name_change && key == "name") {
+            if (p_allow_name_change && kv.key == "name") {
                 continue;
             }
-            if (!compare_value(path, key, old_dict[key], new_dict[key], p_allow_name_change)) {
+            if (!compare_value(path, kv.key, kv.value, new_dict[kv.key], p_allow_name_change)) {
                 failed = true;
             }
         }

@@ -70,10 +70,9 @@ Error HTTPClient::_request(Method p_method, const String &p_url, const Vector<St
 
 String HTTPClient::query_string_from_dict(const Dictionary &p_dict) {
     String query = "";
-    Array keys = p_dict.keys();
-    for (int i = 0; i < keys.size(); ++i) {
-        String encoded_key = String(keys[i]).uri_encode();
-        const Variant &value = p_dict[keys[i]];
+    for (const KeyValue<Variant, Variant> &kv : p_dict) {
+        String encoded_key = String(kv.key).uri_encode();
+        const Variant &value = kv.value;
         switch (value.get_type()) {
             case Variant::ARRAY: {
                 // Repeat the key with every values

@@ -291,9 +291,8 @@ void ScriptEditorDebugger::_remote_object_selected(ObjectID p_id) {
 }
 
 void ScriptEditorDebugger::_remote_objects_edited(const String &p_prop, const TypedDictionary<uint64_t, Variant> &p_values, const String &p_field) {
-    const Array &ids = p_values.keys();
-    for (uint64_t id : ids) {
-        update_remote_object(ObjectID(id), p_prop, p_values[id], p_field);
+    for (const KeyValue<Variant, Variant> &kv : p_values) {
+        update_remote_object(ObjectID(static_cast<uint64_t>(kv.key)), p_prop, kv.value, p_field);
     }
     request_remote_objects(p_values.keys(), false);
 }
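
Note: `p_values` is a `TypedDictionary<uint64_t, Variant>`, but range iteration still yields `KeyValue<Variant, Variant>` pairs, hence the explicit `static_cast<uint64_t>` before constructing the `ObjectID`. The trailing `request_remote_objects(p_values.keys(), false)` call is left alone: it genuinely wants the keys as an `Array`, so `keys()` is still the right tool there.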
@@ -463,17 +463,16 @@ void DependencyRemoveDialog::_find_localization_remaps_of_removed_files(Vector<R
                 p_removed.push_back(dep);
             }
 
-            Array remap_keys = remaps.keys();
-            for (int j = 0; j < remap_keys.size(); j++) {
-                PackedStringArray remapped_files = remaps[remap_keys[j]];
-                for (int k = 0; k < remapped_files.size(); k++) {
-                    int splitter_pos = remapped_files[k].rfind_char(':');
-                    String res_path = remapped_files[k].substr(0, splitter_pos);
+            for (const KeyValue<Variant, Variant> &remap_kv : remaps) {
+                PackedStringArray remapped_files = remap_kv.value;
+                for (const String &remapped_file : remapped_files) {
+                    int splitter_pos = remapped_file.rfind_char(':');
+                    String res_path = remapped_file.substr(0, splitter_pos);
                     if (res_path == path) {
-                        String locale_name = remapped_files[k].substr(splitter_pos + 1);
+                        String locale_name = remapped_file.substr(splitter_pos + 1);
 
                         RemovedDependency dep;
-                        dep.file = vformat(TTR("Localization remap for path '%s' and locale '%s'."), remap_keys[j], locale_name);
+                        dep.file = vformat(TTR("Localization remap for path '%s' and locale '%s'."), remap_kv.key, locale_name);
                         dep.file_type = "";
                         dep.dependency = path;
                         dep.dependency_folder = files.value;

@@ -294,11 +294,10 @@ void EditorCommandPalette::register_shortcuts_as_command() {
 
     // Load command use history.
     Dictionary command_history = EditorSettings::get_singleton()->get_project_metadata("command_palette", "command_history", Dictionary());
-    Array history_entries = command_history.keys();
-    for (int i = 0; i < history_entries.size(); i++) {
-        const String &history_key = history_entries[i];
+    for (const KeyValue<Variant, Variant> &history_kv : command_history) {
+        const String &history_key = history_kv.key;
         if (commands.has(history_key)) {
-            commands[history_key].last_used = command_history[history_key];
+            commands[history_key].last_used = history_kv.value;
         }
     }
 }

@@ -2229,10 +2229,8 @@ void EditorInspectorArray::_move_element(int p_element_index, int p_to_pos) {
     undo_redo->add_undo_property(object, count_property, properties_as_array.size());
     for (int i = 0; i < (int)properties_as_array.size(); i++) {
         Dictionary d = Dictionary(properties_as_array[i]);
-        Array keys = d.keys();
-        for (int j = 0; j < keys.size(); j++) {
-            String key = keys[j];
-            undo_redo->add_undo_property(object, vformat(key, i), d[key]);
+        for (const KeyValue<Variant, Variant> &kv : d) {
+            undo_redo->add_undo_property(object, vformat(kv.key, i), kv.value);
         }
     }
 
@@ -2252,10 +2250,8 @@ void EditorInspectorArray::_move_element(int p_element_index, int p_to_pos) {
     undo_redo->add_do_property(object, count_property, properties_as_array.size());
     for (int i = 0; i < (int)properties_as_array.size(); i++) {
         Dictionary d = properties_as_array[i];
-        Array keys = d.keys();
-        for (int j = 0; j < keys.size(); j++) {
-            String key = keys[j];
-            undo_redo->add_do_property(object, vformat(key, i), d[key]);
+        for (const KeyValue<Variant, Variant> &kv : d) {
+            undo_redo->add_do_property(object, vformat(kv.key, i), kv.value);
         }
     }
 }
@@ -2306,10 +2302,8 @@ void EditorInspectorArray::_clear_array() {
     undo_redo->add_undo_property(object, count_property, count);
     for (int i = 0; i < (int)properties_as_array.size(); i++) {
         Dictionary d = Dictionary(properties_as_array[i]);
-        Array keys = d.keys();
-        for (int j = 0; j < keys.size(); j++) {
-            String key = keys[j];
-            undo_redo->add_undo_property(object, vformat(key, i), d[key]);
+        for (const KeyValue<Variant, Variant> &kv : d) {
+            undo_redo->add_undo_property(object, vformat(kv.key, i), kv.value);
         }
     }
 
@@ -2371,10 +2365,8 @@ void EditorInspectorArray::_resize_array(int p_size) {
     undo_redo->add_undo_property(object, count_property, count);
     for (int i = count - 1; i > p_size - 1; i--) {
         Dictionary d = Dictionary(properties_as_array[i]);
-        Array keys = d.keys();
-        for (int j = 0; j < keys.size(); j++) {
-            String key = keys[j];
-            undo_redo->add_undo_property(object, vformat(key, i), d[key]);
+        for (const KeyValue<Variant, Variant> &kv : d) {
+            undo_redo->add_undo_property(object, vformat(kv.key, i), kv.value);
         }
     }
 

@@ -1585,9 +1585,9 @@ Error EditorExportPlatform::export_project_files(const Ref<EditorExportPreset> &
     }
 
     Dictionary int_export = get_internal_export_files(p_preset, p_debug);
-    for (const Variant &int_name : int_export.keys()) {
-        const PackedByteArray &array = int_export[int_name];
-        err = save_proxy.save_file(p_udata, int_name, array, idx, total, enc_in_filters, enc_ex_filters, key, seed);
+    for (const KeyValue<Variant, Variant> &int_export_kv : int_export) {
+        const PackedByteArray &array = int_export_kv.value;
+        err = save_proxy.save_file(p_udata, int_export_kv.key, array, idx, total, enc_in_filters, enc_ex_filters, key, seed);
         if (err != OK) {
             return err;
         }

@@ -208,10 +208,9 @@ void EditorExportPreset::update_value_overrides() {
 
         Dictionary plugin_overrides = export_plugins[i]->_get_export_options_overrides(platform);
         if (!plugin_overrides.is_empty()) {
-            Array keys = plugin_overrides.keys();
-            for (int x = 0; x < keys.size(); x++) {
-                StringName key = keys[x];
-                Variant value = plugin_overrides[key];
+            for (const KeyValue<Variant, Variant> &kv : plugin_overrides) {
+                const StringName &key = kv.key;
+                const Variant &value = kv.value;
                 if (new_value_overrides.has(key) && new_value_overrides[key] != value) {
                     WARN_PRINT_ED(vformat("Editor export plugin '%s' overrides pre-existing export option override '%s' with new value.", export_plugins[i]->get_name(), key));
                 }

@@ -2942,12 +2942,11 @@ static Error convert_path_to_uid(ResourceUID::ID p_source_id, const String &p_ha
 }
 
 Error ResourceImporterScene::_check_resource_save_paths(ResourceUID::ID p_source_id, const String &p_hash_suffix, const Dictionary &p_data) {
-    Array keys = p_data.keys();
-    for (int di = 0; di < keys.size(); di++) {
-        Dictionary settings = p_data[keys[di]];
+    for (const KeyValue<Variant, Variant> &kv : p_data) {
+        Dictionary settings = kv.value;
 
         if (bool(settings.get("save_to_file/enabled", false)) && settings.has("save_to_file/path")) {
-            String to_hash = keys[di].operator String() + p_hash_suffix;
+            String to_hash = kv.key.operator String() + p_hash_suffix;
             Error ret = convert_path_to_uid(p_source_id, to_hash, settings, "save_to_file/path", "save_to_file/fallback_path");
             ERR_FAIL_COND_V_MSG(ret != OK, ret, vformat("Resource save path %s not valid. Ensure parent directory has been created.", settings.has("save_to_file/path")));
         }
@@ -2957,7 +2956,7 @@ Error ResourceImporterScene::_check_resource_save_paths(ResourceUID::ID p_source
         for (int si = 0; si < slice_count; si++) {
             if (bool(settings.get("slice_" + itos(si + 1) + "/save_to_file/enabled", false)) &&
                     settings.has("slice_" + itos(si + 1) + "/save_to_file/path")) {
-                String to_hash = keys[di].operator String() + p_hash_suffix + itos(si + 1);
+                String to_hash = kv.key.operator String() + p_hash_suffix + itos(si + 1);
                 Error ret = convert_path_to_uid(p_source_id, to_hash, settings,
                         "slice_" + itos(si + 1) + "/save_to_file/path",
                         "slice_" + itos(si + 1) + "/save_to_file/fallback_path");

@@ -439,8 +439,8 @@ void LocalizationEditor::_filesystem_files_moved(const String &p_old_file, const
 
     // Check for the Array elements of the values.
     Array remap_keys = remaps.keys();
-    for (int i = 0; i < remap_keys.size(); i++) {
-        PackedStringArray remapped_files = remaps[remap_keys[i]];
+    for (const Variant &remap_key : remap_keys) {
+        PackedStringArray remapped_files = remaps[remap_key];
         bool remapped_files_updated = false;
 
         for (int j = 0; j < remapped_files.size(); j++) {
@@ -454,12 +454,12 @@ void LocalizationEditor::_filesystem_files_moved(const String &p_old_file, const
                 remapped_files.remove_at(j + 1);
                 remaps_changed = true;
                 remapped_files_updated = true;
-                print_verbose(vformat("Changed remap value \"%s\" to \"%s\" of key \"%s\" due to a moved file.", res_path + ":" + locale_name, remapped_files[j], remap_keys[i]));
+                print_verbose(vformat("Changed remap value \"%s\" to \"%s\" of key \"%s\" due to a moved file.", res_path + ":" + locale_name, remapped_files[j], remap_key));
             }
         }
 
         if (remapped_files_updated) {
-            remaps[remap_keys[i]] = remapped_files;
+            remaps[remap_key] = remapped_files;
         }
     }
 
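
Note: unlike the other call sites, this one keeps the `Array remap_keys = remaps.keys();` snapshot and range-iterates it rather than the dictionary itself, presumably because the loop body writes back into `remaps` (`remaps[remap_key] = remapped_files;`); iterating a copy of the keys avoids mutating the container mid-iteration.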
@@ -708,11 +708,11 @@ void EditorAssetLibrary::_update_repository_options() {
     default_urls["godotengine.org (Official)"] = "https://godotengine.org/asset-library/api";
     Dictionary available_urls = _EDITOR_DEF("asset_library/available_urls", default_urls, true);
     repository->clear();
-    Array keys = available_urls.keys();
-    for (int i = 0; i < keys.size(); i++) {
-        String key = keys[i];
-        repository->add_item(key);
-        repository->set_item_metadata(i, available_urls[key]);
+    int i = 0;
+    for (const KeyValue<Variant, Variant> &kv : available_urls) {
+        repository->add_item(kv.key);
+        repository->set_item_metadata(i, kv.value);
+        i++;
     }
 }
 

@@ -8100,21 +8100,19 @@ void Node3DEditor::_snap_selected_nodes_to_floor() {
     PhysicsDirectSpaceState3D *ss = get_tree()->get_root()->get_world_3d()->get_direct_space_state();
     PhysicsDirectSpaceState3D::RayResult result;
 
-    Array keys = snap_data.keys();
-
     // The maximum height an object can travel to be snapped
     const float max_snap_height = 500.0;
 
     // Will be set to `true` if at least one node from the selection was successfully snapped
     bool snapped_to_floor = false;
 
-    if (keys.size()) {
+    if (!snap_data.is_empty()) {
         // For snapping to be performed, there must be solid geometry under at least one of the selected nodes.
         // We need to check this before snapping to register the undo/redo action only if needed.
-        for (int i = 0; i < keys.size(); i++) {
-            Node *node = Object::cast_to<Node>(keys[i]);
+        for (const KeyValue<Variant, Variant> &kv : snap_data) {
+            Node *node = Object::cast_to<Node>(kv.key);
             Node3D *sp = Object::cast_to<Node3D>(node);
-            Dictionary d = snap_data[node];
+            Dictionary d = kv.value;
             Vector3 from = d["from"];
             Vector3 to = from - Vector3(0.0, max_snap_height, 0.0);
             HashSet<RID> excluded = _get_physics_bodies_rid(sp);
@@ -8134,10 +8132,10 @@ void Node3DEditor::_snap_selected_nodes_to_floor() {
         undo_redo->create_action(TTR("Snap Nodes to Floor"));
 
         // Perform snapping if at least one node can be snapped
-        for (int i = 0; i < keys.size(); i++) {
-            Node *node = Object::cast_to<Node>(keys[i]);
+        for (const KeyValue<Variant, Variant> &kv : snap_data) {
+            Node *node = Object::cast_to<Node>(kv.key);
             Node3D *sp = Object::cast_to<Node3D>(node);
-            Dictionary d = snap_data[node];
+            Dictionary d = kv.value;
             Vector3 from = d["from"];
             Vector3 to = from - Vector3(0.0, max_snap_height, 0.0);
             HashSet<RID> excluded = _get_physics_bodies_rid(sp);

@@ -92,11 +92,10 @@ bpy.ops.export_scene.gltf(**opts['gltf_options'])
 
 String dict_to_python(const Dictionary &p_dict) {
     String entries;
-    Array dict_keys = p_dict.keys();
-    for (int i = 0; i < dict_keys.size(); i++) {
-        const String key = dict_keys[i];
+    for (const KeyValue<Variant, Variant> &kv : p_dict) {
+        const String &key = kv.key;
         String value;
-        Variant raw_value = p_dict[key];
+        const Variant &raw_value = kv.value;
 
         switch (raw_value.get_type()) {
             case Variant::Type::BOOL: {
@@ -125,11 +124,10 @@ String dict_to_python(const Dictionary &p_dict) {
 
 String dict_to_xmlrpc(const Dictionary &p_dict) {
     String members;
-    Array dict_keys = p_dict.keys();
-    for (int i = 0; i < dict_keys.size(); i++) {
-        const String key = dict_keys[i];
+    for (const KeyValue<Variant, Variant> &kv : p_dict) {
+        const String &key = kv.key;
         String value;
-        Variant raw_value = p_dict[key];
+        const Variant &raw_value = kv.value;
 
         switch (raw_value.get_type()) {
             case Variant::Type::BOOL: {
 
@@ -84,9 +84,8 @@ static Dictionary to_dictionary(const HashMap<K, V> &p_inp) {
 template <typename K, typename V>
 static void set_from_dictionary(HashMap<K, V> &r_out, const Dictionary &p_inp) {
     r_out.clear();
-    Array keys = p_inp.keys();
-    for (int i = 0; i < keys.size(); i++) {
-        r_out[keys[i]] = p_inp[keys[i]];
+    for (const KeyValue<Variant, Variant> &kv : p_inp) {
+        r_out[kv.key] = kv.value;
     }
 }
 } //namespace GLTFTemplateConvert
 
@@ -145,9 +145,8 @@ Dictionary GLTFSkin::get_joint_i_to_name() {
 
 void GLTFSkin::set_joint_i_to_name(Dictionary p_joint_i_to_name) {
     joint_i_to_name = HashMap<int, StringName>();
-    Array keys = p_joint_i_to_name.keys();
-    for (int i = 0; i < keys.size(); i++) {
-        joint_i_to_name[keys[i]] = p_joint_i_to_name[keys[i]];
+    for (const KeyValue<Variant, Variant> &kv : p_joint_i_to_name) {
+        joint_i_to_name[kv.key] = kv.value;
     }
 }
 
@@ -205,18 +204,18 @@ Error GLTFSkin::from_dictionary(const Dictionary &dict) {
     ERR_FAIL_COND_V(!dict.has("joint_i_to_bone_i"), ERR_INVALID_DATA);
     Dictionary joint_i_to_bone_i_dict = dict["joint_i_to_bone_i"];
     joint_i_to_bone_i.clear();
-    for (int i = 0; i < joint_i_to_bone_i_dict.keys().size(); ++i) {
-        int key = joint_i_to_bone_i_dict.keys()[i];
-        int value = joint_i_to_bone_i_dict[key];
+    for (const KeyValue<Variant, Variant> &kv : joint_i_to_bone_i_dict) {
+        int key = kv.key;
+        int value = kv.value;
         joint_i_to_bone_i[key] = value;
     }
 
     ERR_FAIL_COND_V(!dict.has("joint_i_to_name"), ERR_INVALID_DATA);
     Dictionary joint_i_to_name_dict = dict["joint_i_to_name"];
     joint_i_to_name.clear();
-    for (int i = 0; i < joint_i_to_name_dict.keys().size(); ++i) {
-        int key = joint_i_to_name_dict.keys()[i];
-        StringName value = joint_i_to_name_dict[key];
+    for (const KeyValue<Variant, Variant> &kv : joint_i_to_name_dict) {
+        int key = kv.key;
+        StringName value = kv.value;
         joint_i_to_name[key] = value;
     }
     if (dict.has("godot_skin")) {

@@ -54,10 +54,9 @@ void OpenXRSelectRuntime::_update_items() {
     set_item_metadata(index, "");
     index++;
 
-    Array keys = runtimes.keys();
-    for (int i = 0; i < keys.size(); i++) {
-        String key = keys[i];
-        String path = runtimes[key];
+    for (const KeyValue<Variant, Variant> &kv : runtimes) {
+        const String &key = kv.key;
+        const String &path = kv.value;
         String adj_path = path.replace("~", home_folder);
 
         if (da->file_exists(adj_path)) {

@@ -84,11 +84,9 @@ HashMap<String, bool *> OpenXRExtensionWrapper::get_requested_extensions() {
 
     if (GDVIRTUAL_CALL(_get_requested_extensions, request_extension)) {
         HashMap<String, bool *> result;
-        Array keys = request_extension.keys();
-        for (int i = 0; i < keys.size(); i++) {
-            String key = keys.get(i);
-            GDExtensionPtr<bool> value = VariantCaster<GDExtensionPtr<bool>>::cast(request_extension.get(key, GDExtensionPtr<bool>(nullptr)));
-            result.insert(key, value);
+        for (const KeyValue<Variant, Variant> &kv : request_extension) {
+            GDExtensionPtr<bool> value = VariantCaster<GDExtensionPtr<bool>>::cast(kv.value);
+            result.insert(kv.key, value);
         }
         return result;
     }

@@ -1247,9 +1247,8 @@ void CodeEdit::add_auto_brace_completion_pair(const String &p_open_key, const St
 void CodeEdit::set_auto_brace_completion_pairs(const Dictionary &p_auto_brace_completion_pairs) {
     auto_brace_completion_pairs.clear();
 
-    Array keys = p_auto_brace_completion_pairs.keys();
-    for (int i = 0; i < keys.size(); i++) {
-        add_auto_brace_completion_pair(keys[i], p_auto_brace_completion_pairs[keys[i]]);
+    for (const KeyValue<Variant, Variant> &kv : p_auto_brace_completion_pairs) {
+        add_auto_brace_completion_pair(kv.key, kv.value);
     }
 }
 