/*************************************************************************/
/*  editor_file_system.cpp                                               */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md)    */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/

#include "editor_file_system.h"

#include "core/io/resource_importer.h"
#include "core/io/resource_loader.h"
#include "core/io/resource_saver.h"
#include "core/os/file_access.h"
#include "core/os/os.h"
#include "core/project_settings.h"
#include "core/variant_parser.h"
#include "editor_node.h"
#include "editor_resource_preview.h"
#include "editor_settings.h"

EditorFileSystem *EditorFileSystem::singleton = NULL;

//the name is the version, to keep compatibility with different versions of Godot
#define CACHE_FILE_NAME "filesystem_cache6"

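// EditorFileSystemDirectory is the in-memory representation of one directory
// under res://: its files, subdirectories and the cached import metadata for
// each entry. EditorFileSystem below builds and maintains this tree.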
void EditorFileSystemDirectory::sort_files() {

	files.sort_custom<FileInfoSort>();
}

int EditorFileSystemDirectory::find_file_index(const String &p_file) const {

	for (int i = 0; i < files.size(); i++) {
		if (files[i]->file == p_file)
			return i;
	}
	return -1;
}

int EditorFileSystemDirectory::find_dir_index(const String &p_dir) const {

	for (int i = 0; i < subdirs.size(); i++) {
		if (subdirs[i]->name == p_dir)
			return i;
	}
	return -1;
}

int EditorFileSystemDirectory::get_subdir_count() const {

	return subdirs.size();
}

EditorFileSystemDirectory *EditorFileSystemDirectory::get_subdir(int p_idx) {

	ERR_FAIL_INDEX_V(p_idx, subdirs.size(), NULL);
	return subdirs[p_idx];
}

int EditorFileSystemDirectory::get_file_count() const {

	return files.size();
}

String EditorFileSystemDirectory::get_file(int p_idx) const {

	ERR_FAIL_INDEX_V(p_idx, files.size(), "");

	return files[p_idx]->file;
}

String EditorFileSystemDirectory::get_path() const {

	String p;
	const EditorFileSystemDirectory *d = this;
	while (d->parent) {
		p = d->name.plus_file(p);
		d = d->parent;
	}

	return "res://" + p;
}

String EditorFileSystemDirectory::get_file_path(int p_idx) const {

	String file = get_file(p_idx);
	const EditorFileSystemDirectory *d = this;
	while (d->parent) {
		file = d->name.plus_file(file);
		d = d->parent;
	}

	return "res://" + file;
}

Vector<String> EditorFileSystemDirectory::get_file_deps(int p_idx) const {

	ERR_FAIL_INDEX_V(p_idx, files.size(), Vector<String>());
	return files[p_idx]->deps;
}

bool EditorFileSystemDirectory::get_file_import_is_valid(int p_idx) const {

	ERR_FAIL_INDEX_V(p_idx, files.size(), false);
	return files[p_idx]->import_valid;
}

String EditorFileSystemDirectory::get_file_script_class_name(int p_idx) const {
	return files[p_idx]->script_class_name;
}

String EditorFileSystemDirectory::get_file_script_class_extends(int p_idx) const {
	return files[p_idx]->script_class_extends;
}

String EditorFileSystemDirectory::get_file_script_class_icon_path(int p_idx) const {
	return files[p_idx]->script_class_icon_path;
}

StringName EditorFileSystemDirectory::get_file_type(int p_idx) const {

	ERR_FAIL_INDEX_V(p_idx, files.size(), "");
	return files[p_idx]->type;
}

String EditorFileSystemDirectory::get_name() {

	return name;
}

EditorFileSystemDirectory *EditorFileSystemDirectory::get_parent() {

	return parent;
}

void EditorFileSystemDirectory::_bind_methods() {

	ClassDB::bind_method(D_METHOD("get_subdir_count"), &EditorFileSystemDirectory::get_subdir_count);
	ClassDB::bind_method(D_METHOD("get_subdir", "idx"), &EditorFileSystemDirectory::get_subdir);
	ClassDB::bind_method(D_METHOD("get_file_count"), &EditorFileSystemDirectory::get_file_count);
	ClassDB::bind_method(D_METHOD("get_file", "idx"), &EditorFileSystemDirectory::get_file);
	ClassDB::bind_method(D_METHOD("get_file_path", "idx"), &EditorFileSystemDirectory::get_file_path);
	ClassDB::bind_method(D_METHOD("get_file_type", "idx"), &EditorFileSystemDirectory::get_file_type);
	ClassDB::bind_method(D_METHOD("get_file_script_class_name", "idx"), &EditorFileSystemDirectory::get_file_script_class_name);
	ClassDB::bind_method(D_METHOD("get_file_script_class_extends", "idx"), &EditorFileSystemDirectory::get_file_script_class_extends);
	ClassDB::bind_method(D_METHOD("get_file_import_is_valid", "idx"), &EditorFileSystemDirectory::get_file_import_is_valid);
	ClassDB::bind_method(D_METHOD("get_name"), &EditorFileSystemDirectory::get_name);
	ClassDB::bind_method(D_METHOD("get_path"), &EditorFileSystemDirectory::get_path);
	ClassDB::bind_method(D_METHOD("get_parent"), &EditorFileSystemDirectory::get_parent);
	ClassDB::bind_method(D_METHOD("find_file_index", "name"), &EditorFileSystemDirectory::find_file_index);
	ClassDB::bind_method(D_METHOD("find_dir_index", "name"), &EditorFileSystemDirectory::find_dir_index);
}

EditorFileSystemDirectory::EditorFileSystemDirectory() {

	modified_time = 0;
	parent = NULL;
	verified = false;
}

EditorFileSystemDirectory::~EditorFileSystemDirectory() {

	for (int i = 0; i < files.size(); i++) {

		memdelete(files[i]);
	}

	for (int i = 0; i < subdirs.size(); i++) {

		memdelete(subdirs[i]);
	}
}

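// Full scan: reads the on-disk cache, then rebuilds the whole directory tree
// into new_filesystem. When use_threads is enabled this runs on a background
// thread started from scan().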
void EditorFileSystem::_scan_filesystem() {

	ERR_FAIL_COND(!scanning || new_filesystem);

	//read .fscache
	String cpath;

	sources_changed.clear();
	file_cache.clear();

	String project = ProjectSettings::get_singleton()->get_resource_path();

	String fscache = EditorSettings::get_singleton()->get_project_settings_dir().plus_file(CACHE_FILE_NAME);
	FileAccess *f = FileAccess::open(fscache, FileAccess::READ);

	bool first = true;
	if (f) {
		//read the disk cache
		while (!f->eof_reached()) {

			String l = f->get_line().strip_edges();
			if (first) {
				if (first_scan) {
					// only use this on first scan, afterwards it gets ignored
					// this is so on first reimport we synchronize versions, then
					// we don't care until editor restart. This is for usability mainly so
					// your workflow is not killed after changing a setting by forceful reimporting
					// everything there is.
					filesystem_settings_version_for_import = l.strip_edges();
					if (filesystem_settings_version_for_import != ResourceFormatImporter::get_singleton()->get_import_settings_hash()) {
						revalidate_import_files = true;
					}
				}
				first = false;
				continue;
			}

			if (l == String())
				continue;

			if (l.begins_with("::")) {
				Vector<String> split = l.split("::");
				ERR_CONTINUE(split.size() != 3);
				String name = split[1];

				cpath = name;

			} else {
				Vector<String> split = l.split("::");
				ERR_CONTINUE(split.size() != 8);
				String name = split[0];
				String file;

				file = name;
				name = cpath.plus_file(name);

				FileCache fc;
				fc.type = split[1];
				fc.modification_time = split[2].to_int64();
				fc.import_modification_time = split[3].to_int64();
				fc.import_valid = split[4].to_int64() != 0;
				fc.import_group_file = split[5].strip_edges();
				fc.script_class_name = split[6].get_slice("<>", 0);
				fc.script_class_extends = split[6].get_slice("<>", 1);
				fc.script_class_icon_path = split[6].get_slice("<>", 2);

				String deps = split[7].strip_edges();
				if (deps.length()) {
					Vector<String> dp = deps.split("<>");
					for (int i = 0; i < dp.size(); i++) {
						String path = dp[i];
						fc.deps.push_back(path);
					}
				}

				file_cache[name] = fc;
			}
		}

		f->close();
		memdelete(f);
	}

	String update_cache = EditorSettings::get_singleton()->get_project_settings_dir().plus_file("filesystem_update4");

	if (FileAccess::exists(update_cache)) {
		{
			FileAccessRef f2 = FileAccess::open(update_cache, FileAccess::READ);
			String l = f2->get_line().strip_edges();
			while (l != String()) {

				file_cache.erase(l); //erase cache for this, so it gets updated
				l = f2->get_line().strip_edges();
			}
		}

		DirAccessRef d = DirAccess::create(DirAccess::ACCESS_FILESYSTEM);
		d->remove(update_cache); //bye bye update cache
	}

	EditorProgressBG scan_progress("efs", "ScanFS", 1000);

	ScanProgress sp;
	sp.low = 0;
	sp.hi = 1;
	sp.progress = &scan_progress;

	new_filesystem = memnew(EditorFileSystemDirectory);
	new_filesystem->parent = NULL;

	DirAccess *d = DirAccess::create(DirAccess::ACCESS_RESOURCES);
	d->change_dir("res://");
	_scan_new_dir(new_filesystem, d, sp);

	file_cache.clear(); //clear caches, no longer needed

	memdelete(d);

	if (!first_scan) {
		//on the first scan this is done from the main thread after re-importing
		_save_filesystem_cache();
	}

	scanning = false;
}

void EditorFileSystem::_save_filesystem_cache() {

	group_file_cache.clear();

	String fscache = EditorSettings::get_singleton()->get_project_settings_dir().plus_file(CACHE_FILE_NAME);

	FileAccess *f = FileAccess::open(fscache, FileAccess::WRITE);
	if (f == NULL) {
		ERR_PRINTS("Error writing fscache: " + fscache);
	} else {
		f->store_line(filesystem_settings_version_for_import);
		_save_filesystem_cache(filesystem, f);
		f->close();
		memdelete(f);
	}
}

void EditorFileSystem::_thread_func(void *_userdata) {

	EditorFileSystem *sd = (EditorFileSystem *)_userdata;
	sd->_scan_filesystem();
}

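// Returns true when p_path must be (re)imported: the .import file is missing
// or invalid, an imported artifact is gone, the source file was moved, or the
// stored md5 checksums no longer match the files on disk.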
bool EditorFileSystem::_test_for_reimport(const String &p_path, bool p_only_imported_files) {

	if (!reimport_on_missing_imported_files && p_only_imported_files)
		return false;

	if (!FileAccess::exists(p_path + ".import")) {
		return true;
	}

	if (!ResourceFormatImporter::get_singleton()->are_import_settings_valid(p_path)) {
		//reimport settings are not valid, reimport
		return true;
	}

	Error err;
	FileAccess *f = FileAccess::open(p_path + ".import", FileAccess::READ, &err);

	if (!f) { //no import file, do reimport
		return true;
	}

	VariantParser::StreamFile stream;
	stream.f = f;

	String assign;
	Variant value;
	VariantParser::Tag next_tag;

	int lines = 0;
	String error_text;

	List<String> to_check;

	String source_file = "";
	String source_md5 = "";
	Vector<String> dest_files;
	String dest_md5 = "";

	while (true) {

		assign = Variant();
		next_tag.fields.clear();
		next_tag.name = String();

		err = VariantParser::parse_tag_assign_eof(&stream, lines, error_text, next_tag, assign, value, NULL, true);
		if (err == ERR_FILE_EOF) {
			break;
		} else if (err != OK) {
			ERR_PRINTS("ResourceFormatImporter::load - " + p_path + ".import:" + itos(lines) + " error: " + error_text);
			memdelete(f);
			return false; //parse error, try reimport manually (Avoid reimport loop on broken file)
		}

		if (assign != String()) {
			if (assign.begins_with("path")) {
				to_check.push_back(value);
			} else if (assign == "files") {
				Array fa = value;
				for (int i = 0; i < fa.size(); i++) {
					to_check.push_back(fa[i]);
				}
			} else if (!p_only_imported_files) {
				if (assign == "source_file") {
					source_file = value;
				} else if (assign == "dest_files") {
					dest_files = value;
				}
			}

		} else if (next_tag.name != "remap" && next_tag.name != "deps") {
			break;
		}
	}

	memdelete(f);

	// Read the md5's from a separate file (so the import parameters aren't dependent on the file version)
	String base_path = ResourceFormatImporter::get_singleton()->get_import_base_path(p_path);
	FileAccess *md5s = FileAccess::open(base_path + ".md5", FileAccess::READ, &err);
	if (!md5s) { // No md5's stored for this resource
		return true;
	}

	VariantParser::StreamFile md5_stream;
	md5_stream.f = md5s;

	while (true) {

		assign = Variant();
		next_tag.fields.clear();
		next_tag.name = String();

		err = VariantParser::parse_tag_assign_eof(&md5_stream, lines, error_text, next_tag, assign, value, NULL, true);

		if (err == ERR_FILE_EOF) {
			break;
		} else if (err != OK) {
			ERR_PRINTS("ResourceFormatImporter::load - " + p_path + ".import.md5:" + itos(lines) + " error: " + error_text);
			memdelete(md5s);
			return false; // parse error
		}
		if (assign != String()) {
			if (!p_only_imported_files) {
				if (assign == "source_md5") {
					source_md5 = value;
				} else if (assign == "dest_md5") {
					dest_md5 = value;
				}
			}
		}
	}
	memdelete(md5s);

	//imported files are gone, reimport
	for (List<String>::Element *E = to_check.front(); E; E = E->next()) {
		if (!FileAccess::exists(E->get())) {
			return true;
		}
	}

	//check source md5 matching
	if (!p_only_imported_files) {

		if (source_file != String() && source_file != p_path) {
			return true; //file was moved, reimport
		}

		if (source_md5 == String()) {
			return true; //lacks md5, so just reimport
		}

		String md5 = FileAccess::get_md5(p_path);
		if (md5 != source_md5) {
			return true;
		}

		if (dest_files.size() && dest_md5 != String()) {
			md5 = FileAccess::get_multiple_md5(dest_files);
			if (md5 != dest_md5) {
				return true;
			}
		}
	}

	return false; //nothing changed
}

bool EditorFileSystem::_update_scan_actions() {

	sources_changed.clear();

	bool fs_changed = false;

	Vector<String> reimports;
	Vector<String> reloads;

	for (List<ItemAction>::Element *E = scan_actions.front(); E; E = E->next()) {

		ItemAction &ia = E->get();

		switch (ia.action) {

			case ItemAction::ACTION_NONE: {

			} break;
			case ItemAction::ACTION_DIR_ADD: {

				int idx = 0;
				for (int i = 0; i < ia.dir->subdirs.size(); i++) {

					if (ia.new_dir->name < ia.dir->subdirs[i]->name)
						break;
					idx++;
				}
				if (idx == ia.dir->subdirs.size()) {
					ia.dir->subdirs.push_back(ia.new_dir);
				} else {
					ia.dir->subdirs.insert(idx, ia.new_dir);
				}

				fs_changed = true;
			} break;
			case ItemAction::ACTION_DIR_REMOVE: {

				ERR_CONTINUE(!ia.dir->parent);
				ia.dir->parent->subdirs.erase(ia.dir);
				memdelete(ia.dir);
				fs_changed = true;
			} break;
			case ItemAction::ACTION_FILE_ADD: {

				int idx = 0;
				for (int i = 0; i < ia.dir->files.size(); i++) {

					if (ia.new_file->file < ia.dir->files[i]->file)
						break;
					idx++;
				}
				if (idx == ia.dir->files.size()) {
					ia.dir->files.push_back(ia.new_file);
				} else {
					ia.dir->files.insert(idx, ia.new_file);
				}

				fs_changed = true;

			} break;
			case ItemAction::ACTION_FILE_REMOVE: {

				int idx = ia.dir->find_file_index(ia.file);
				ERR_CONTINUE(idx == -1);
				_delete_internal_files(ia.dir->files[idx]->file);
				memdelete(ia.dir->files[idx]);
				ia.dir->files.remove(idx);

				fs_changed = true;

			} break;
			case ItemAction::ACTION_FILE_TEST_REIMPORT: {

				int idx = ia.dir->find_file_index(ia.file);
				ERR_CONTINUE(idx == -1);
				String full_path = ia.dir->get_file_path(idx);
				if (_test_for_reimport(full_path, false)) {
					//must reimport
					reimports.push_back(full_path);
				} else {
					//must not reimport, all was good
					//update modified times, to avoid reimport
					ia.dir->files[idx]->modified_time = FileAccess::get_modified_time(full_path);
					ia.dir->files[idx]->import_modified_time = FileAccess::get_modified_time(full_path + ".import");
				}

				fs_changed = true;
			} break;
			case ItemAction::ACTION_FILE_RELOAD: {

				int idx = ia.dir->find_file_index(ia.file);
				ERR_CONTINUE(idx == -1);
				String full_path = ia.dir->get_file_path(idx);

				reloads.push_back(full_path);

			} break;
		}
	}

	if (reimports.size()) {
		reimport_files(reimports);
	}

	if (first_scan) {
		//only on first scan this is valid and updated, then settings changed.
		revalidate_import_files = false;
		filesystem_settings_version_for_import = ResourceFormatImporter::get_singleton()->get_import_settings_hash();
		_save_filesystem_cache();
	}

	if (reloads.size()) {
		emit_signal("resources_reload", reloads);
	}

	scan_actions.clear();

	return fs_changed;
}

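// Public entry point for a full rescan of res://. Runs on a worker thread when
// possible and emits "filesystem_changed" / "sources_changed" when done.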
void EditorFileSystem::scan() {

	if (false /*&& bool(Globals::get_singleton()->get("debug/disable_scan"))*/)
		return;

	if (scanning || scanning_changes || thread)
		return;

	_update_extensions();

	abort_scan = false;
	if (!use_threads) {
		scanning = true;
		scan_total = 0;
		_scan_filesystem();
		if (filesystem)
			memdelete(filesystem);
		//file_type_cache.clear();
		filesystem = new_filesystem;
		new_filesystem = NULL;
		_update_scan_actions();
		scanning = false;
		emit_signal("filesystem_changed");
		emit_signal("sources_changed", sources_changed.size() > 0);
		_queue_update_script_classes();
		first_scan = false;
	} else {

		ERR_FAIL_COND(thread);
		set_process(true);
		Thread::Settings s;
		scanning = true;
		scan_total = 0;
		s.priority = Thread::PRIORITY_LOW;
		thread = Thread::create(_thread_func, this, s);
		//tree->hide();
		//progress->show();
	}
}

void EditorFileSystem::ScanProgress::update(int p_current, int p_total) const {

	float ratio = low + ((hi - low) / p_total) * p_current;
	progress->step(ratio * 1000);
	EditorFileSystem::singleton->scan_total = ratio;
}

EditorFileSystem::ScanProgress EditorFileSystem::ScanProgress::get_sub(int p_current, int p_total) const {

	ScanProgress sp = *this;
	float slice = (sp.hi - sp.low) / p_total;
	sp.low += slice * p_current;
	sp.hi = slice;
	return sp;
}

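// Recursively scans a directory that is not yet part of the tree, creating
// FileInfo entries, reusing cached metadata when modification times match,
// and queueing ACTION_FILE_TEST_REIMPORT actions for importable files.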
void EditorFileSystem::_scan_new_dir(EditorFileSystemDirectory *p_dir, DirAccess *da, const ScanProgress &p_progress) {

	List<String> dirs;
	List<String> files;

	String cd = da->get_current_dir();

	p_dir->modified_time = FileAccess::get_modified_time(cd);

	da->list_dir_begin();
	while (true) {

		bool isdir;
		String f = da->get_next(&isdir);
		if (f == "")
			break;

		if (isdir) {

			if (f.begins_with(".")) //ignore hidden and . / ..
				continue;

			if (FileAccess::exists(cd.plus_file(f).plus_file("project.godot"))) // skip if another project inside this
				continue;

			if (FileAccess::exists(cd.plus_file(f).plus_file(".gdignore"))) // skip if a .gdignore file is inside this
				continue;

			dirs.push_back(f);

		} else {

			files.push_back(f);
		}
	}

	da->list_dir_end();

	dirs.sort_custom<NaturalNoCaseComparator>();
	files.sort_custom<NaturalNoCaseComparator>();

	int total = dirs.size() + files.size();
	int idx = 0;

	for (List<String>::Element *E = dirs.front(); E; E = E->next(), idx++) {

		if (da->change_dir(E->get()) == OK) {

			String d = da->get_current_dir();

			if (d == cd || !d.begins_with(cd)) {
				da->change_dir(cd); //avoid recursion
			} else {

				EditorFileSystemDirectory *efd = memnew(EditorFileSystemDirectory);

				efd->parent = p_dir;
				efd->name = E->get();

				_scan_new_dir(efd, da, p_progress.get_sub(idx, total));

				int idx2 = 0;
				for (int i = 0; i < p_dir->subdirs.size(); i++) {

					if (efd->name < p_dir->subdirs[i]->name)
						break;
					idx2++;
				}
				if (idx2 == p_dir->subdirs.size()) {
					p_dir->subdirs.push_back(efd);
				} else {
					p_dir->subdirs.insert(idx2, efd);
				}

				da->change_dir("..");
			}
		} else {

			ERR_PRINTS("Cannot go into subdir: " + E->get());
		}

		p_progress.update(idx, total);
	}

	for (List<String>::Element *E = files.front(); E; E = E->next(), idx++) {

		String ext = E->get().get_extension().to_lower();
		if (!valid_extensions.has(ext)) {
			continue; //invalid
		}

		EditorFileSystemDirectory::FileInfo *fi = memnew(EditorFileSystemDirectory::FileInfo);
		fi->file = E->get();

		String path = cd.plus_file(fi->file);

		FileCache *fc = file_cache.getptr(path);
		uint64_t mt = FileAccess::get_modified_time(path);

		if (import_extensions.has(ext)) {

			//is imported
			uint64_t import_mt = 0;
			if (FileAccess::exists(path + ".import")) {
				import_mt = FileAccess::get_modified_time(path + ".import");
			}

			if (fc && fc->modification_time == mt && fc->import_modification_time == import_mt && !_test_for_reimport(path, true)) {

				fi->type = fc->type;
				fi->deps = fc->deps;
				fi->modified_time = fc->modification_time;
				fi->import_modified_time = fc->import_modification_time;

				fi->import_valid = fc->import_valid;
				fi->script_class_name = fc->script_class_name;
				fi->import_group_file = fc->import_group_file;
				fi->script_class_extends = fc->script_class_extends;
				fi->script_class_icon_path = fc->script_class_icon_path;

				if (revalidate_import_files && !ResourceFormatImporter::get_singleton()->are_import_settings_valid(path)) {
					ItemAction ia;
					ia.action = ItemAction::ACTION_FILE_TEST_REIMPORT;
					ia.dir = p_dir;
					ia.file = E->get();
					scan_actions.push_back(ia);
				}

				if (fc->type == String()) {
					fi->type = ResourceLoader::get_resource_type(path);
					fi->import_group_file = ResourceLoader::get_import_group_file(path);
					//there is also the chance that file type changed due to reimport, must probably check this somehow here (or kind of note it for next time in another file?)
					//note: I think this should not happen any longer..
				}

			} else {

				fi->type = ResourceFormatImporter::get_singleton()->get_resource_type(path);
				fi->import_group_file = ResourceFormatImporter::get_singleton()->get_import_group_file(path);
				fi->script_class_name = _get_global_script_class(fi->type, path, &fi->script_class_extends, &fi->script_class_icon_path);
				fi->modified_time = 0;
				fi->import_modified_time = 0;
				fi->import_valid = ResourceLoader::is_import_valid(path);

				ItemAction ia;
				ia.action = ItemAction::ACTION_FILE_TEST_REIMPORT;
				ia.dir = p_dir;
				ia.file = E->get();
				scan_actions.push_back(ia);
			}
		} else {

			if (fc && fc->modification_time == mt) {
				//not imported, so just update type if changed
				fi->type = fc->type;
				fi->modified_time = fc->modification_time;
				fi->deps = fc->deps;
				fi->import_modified_time = 0;
				fi->import_valid = true;
				fi->script_class_name = fc->script_class_name;
				fi->script_class_extends = fc->script_class_extends;
				fi->script_class_icon_path = fc->script_class_icon_path;
			} else {
				//new or modified time
				fi->type = ResourceLoader::get_resource_type(path);
				fi->script_class_name = _get_global_script_class(fi->type, path, &fi->script_class_extends, &fi->script_class_icon_path);
				fi->deps = _get_dependencies(path);
				fi->modified_time = mt;
				fi->import_modified_time = 0;
				fi->import_valid = true;
			}
		}

		p_dir->files.push_back(fi);

		p_progress.update(idx, total);
	}
}

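// Incremental pass used by scan_changes(): diffs the cached tree against the
// directory contents on disk and queues ItemActions (add, remove, reimport,
// reload) instead of rebuilding everything from scratch.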
void EditorFileSystem::_scan_fs_changes(EditorFileSystemDirectory *p_dir, const ScanProgress &p_progress) {

	uint64_t current_mtime = FileAccess::get_modified_time(p_dir->get_path());

	bool updated_dir = false;
	String cd = p_dir->get_path();

	if (current_mtime != p_dir->modified_time || using_fat32_or_exfat) {

		updated_dir = true;
		p_dir->modified_time = current_mtime;
		//ooooops, dir changed, see what's going on

		//first mark everything as verified

		for (int i = 0; i < p_dir->files.size(); i++) {

			p_dir->files[i]->verified = false;
		}

		for (int i = 0; i < p_dir->subdirs.size(); i++) {

			p_dir->get_subdir(i)->verified = false;
		}

		//then scan files and directories and check what's different

		DirAccess *da = DirAccess::create(DirAccess::ACCESS_RESOURCES);

		da->change_dir(cd);
		da->list_dir_begin();
		while (true) {

			bool isdir;
			String f = da->get_next(&isdir);
			if (f == "")
				break;

			if (isdir) {

				if (f.begins_with(".")) //ignore hidden and . / ..
					continue;

				int idx = p_dir->find_dir_index(f);
				if (idx == -1) {

					if (FileAccess::exists(cd.plus_file(f).plus_file("project.godot"))) // skip if another project inside this
						continue;
					if (FileAccess::exists(cd.plus_file(f).plus_file(".gdignore"))) // skip if a .gdignore file is inside this
						continue;

					EditorFileSystemDirectory *efd = memnew(EditorFileSystemDirectory);

					efd->parent = p_dir;
					efd->name = f;
					DirAccess *d = DirAccess::create(DirAccess::ACCESS_RESOURCES);
					d->change_dir(cd.plus_file(f));
					_scan_new_dir(efd, d, p_progress.get_sub(1, 1));
					memdelete(d);

					ItemAction ia;
					ia.action = ItemAction::ACTION_DIR_ADD;
					ia.dir = p_dir;
					ia.file = f;
					ia.new_dir = efd;
					scan_actions.push_back(ia);
				} else {
					p_dir->subdirs[idx]->verified = true;
				}

			} else {

				String ext = f.get_extension().to_lower();
				if (!valid_extensions.has(ext))
					continue; //invalid

				int idx = p_dir->find_file_index(f);

				if (idx == -1) {
					//never seen this file, add action to add it
					EditorFileSystemDirectory::FileInfo *fi = memnew(EditorFileSystemDirectory::FileInfo);
					fi->file = f;

					String path = cd.plus_file(fi->file);
					fi->modified_time = FileAccess::get_modified_time(path);
					fi->import_modified_time = 0;
					fi->type = ResourceLoader::get_resource_type(path);
					fi->script_class_name = _get_global_script_class(fi->type, path, &fi->script_class_extends, &fi->script_class_icon_path);
					fi->import_valid = ResourceLoader::is_import_valid(path);
					fi->import_group_file = ResourceLoader::get_import_group_file(path);

					{
						ItemAction ia;
						ia.action = ItemAction::ACTION_FILE_ADD;
						ia.dir = p_dir;
						ia.file = f;
						ia.new_file = fi;
						scan_actions.push_back(ia);
					}

					if (import_extensions.has(ext)) {
						//if it can be imported, and it was added, it needs to be reimported
						ItemAction ia;
						ia.action = ItemAction::ACTION_FILE_TEST_REIMPORT;
						ia.dir = p_dir;
						ia.file = f;
						scan_actions.push_back(ia);
					}

				} else {
					p_dir->files[idx]->verified = true;
				}
			}
		}

		da->list_dir_end();
		memdelete(da);
	}

	for (int i = 0; i < p_dir->files.size(); i++) {

		if (updated_dir && !p_dir->files[i]->verified) {
			//this file was removed, add action to remove it
			ItemAction ia;
			ia.action = ItemAction::ACTION_FILE_REMOVE;
			ia.dir = p_dir;
			ia.file = p_dir->files[i]->file;
			scan_actions.push_back(ia);
			continue;
		}

		String path = cd.plus_file(p_dir->files[i]->file);

		if (import_extensions.has(p_dir->files[i]->file.get_extension().to_lower())) {
			//check here if file must be imported or not

			uint64_t mt = FileAccess::get_modified_time(path);

			bool reimport = false;

			if (mt != p_dir->files[i]->modified_time) {
				reimport = true; //it was modified, must be reimported.
			} else if (!FileAccess::exists(path + ".import")) {
				reimport = true; //no .import file, obviously reimport
			} else {

				uint64_t import_mt = FileAccess::get_modified_time(path + ".import");
				if (import_mt != p_dir->files[i]->import_modified_time) {
					reimport = true;
				} else if (_test_for_reimport(path, true)) {
					reimport = true;
				}
			}

			if (reimport) {

				ItemAction ia;
				ia.action = ItemAction::ACTION_FILE_TEST_REIMPORT;
				ia.dir = p_dir;
				ia.file = p_dir->files[i]->file;
				scan_actions.push_back(ia);
			}
		} else if (ResourceCache::has(path)) { //test for potential reload

			uint64_t mt = FileAccess::get_modified_time(path);

			if (mt != p_dir->files[i]->modified_time) {

				p_dir->files[i]->modified_time = mt; //save new time, but test for reload

				ItemAction ia;
				ia.action = ItemAction::ACTION_FILE_RELOAD;
				ia.dir = p_dir;
				ia.file = p_dir->files[i]->file;
				scan_actions.push_back(ia);
			}
		}
	}

	for (int i = 0; i < p_dir->subdirs.size(); i++) {

		if (updated_dir && !p_dir->subdirs[i]->verified) {
			//this directory was removed, add action to remove it
			ItemAction ia;
			ia.action = ItemAction::ACTION_DIR_REMOVE;
			ia.dir = p_dir->subdirs[i];
			scan_actions.push_back(ia);
			continue;
		}

		_scan_fs_changes(p_dir->get_subdir(i), p_progress);
	}
}

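// Deletes the .import file and the imported artifacts that belong to a source
// file which was removed from the project.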
void EditorFileSystem::_delete_internal_files(String p_file) {
	if (FileAccess::exists(p_file + ".import")) {
		List<String> paths;
		ResourceFormatImporter::get_singleton()->get_internal_resource_path_list(p_file, &paths);
		DirAccess *da = DirAccess::create(DirAccess::ACCESS_RESOURCES);
		for (List<String>::Element *E = paths.front(); E; E = E->next()) {
			da->remove(E->get());
		}
		da->remove(p_file + ".import");
		memdelete(da);
	}
}

void EditorFileSystem::_thread_func_sources(void *_userdata) {

	EditorFileSystem *efs = (EditorFileSystem *)_userdata;
	if (efs->filesystem) {
		EditorProgressBG pr("sources", TTR("ScanSources"), 1000);
		ScanProgress sp;
		sp.progress = &pr;
		sp.hi = 1;
		sp.low = 0;
		efs->_scan_fs_changes(efs->filesystem, sp);
	}
	efs->scanning_changes_done = true;
}

void EditorFileSystem::get_changed_sources(List<String> *r_changed) {

	*r_changed = sources_changed;
}

void EditorFileSystem::scan_changes() {

	if (scanning || scanning_changes || thread)
		return;

	_update_extensions();
	sources_changed.clear();
	scanning_changes = true;
	scanning_changes_done = false;

	abort_scan = false;

	if (!use_threads) {
		if (filesystem) {
			EditorProgressBG pr("sources", TTR("ScanSources"), 1000);
			ScanProgress sp;
			sp.progress = &pr;
			sp.hi = 1;
			sp.low = 0;
			scan_total = 0;
			_scan_fs_changes(filesystem, sp);
			if (_update_scan_actions())
				emit_signal("filesystem_changed");
		}
		scanning_changes = false;
		scanning_changes_done = true;
		emit_signal("sources_changed", sources_changed.size() > 0);
	} else {

		ERR_FAIL_COND(thread_sources);
		set_process(true);
		scan_total = 0;
		Thread::Settings s;
		s.priority = Thread::PRIORITY_LOW;
		thread_sources = Thread::create(_thread_func_sources, this, s);
	}
}

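// Notifications drive the threaded scan lifecycle: the first scan is deferred
// on ENTER_TREE, worker threads are joined on EXIT_TREE, and PROCESS polls
// for background scans that have finished.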
void EditorFileSystem::_notification(int p_what) {

	switch (p_what) {

		case NOTIFICATION_ENTER_TREE: {

			call_deferred("scan"); //this should happen after every editor node entered the tree

		} break;
		case NOTIFICATION_EXIT_TREE: {

			if (use_threads && thread) {
				//abort thread if in progress
				abort_scan = true;
				while (scanning) {
					OS::get_singleton()->delay_usec(1000);
				}
				Thread::wait_to_finish(thread);
				memdelete(thread);
				thread = NULL;
				WARN_PRINTS("Scan thread aborted...");
				set_process(false);
			}

			if (filesystem)
				memdelete(filesystem);
			if (new_filesystem)
				memdelete(new_filesystem);
			filesystem = NULL;
			new_filesystem = NULL;

		} break;
		case NOTIFICATION_PROCESS: {

			if (use_threads) {

				if (scanning_changes) {

					if (scanning_changes_done) {

						scanning_changes = false;

						set_process(false);

						Thread::wait_to_finish(thread_sources);
						memdelete(thread_sources);
						thread_sources = NULL;
						if (_update_scan_actions())
							emit_signal("filesystem_changed");
						emit_signal("sources_changed", sources_changed.size() > 0);
						_queue_update_script_classes();
						first_scan = false;
					}
				} else if (!scanning) {

					set_process(false);

					if (filesystem)
						memdelete(filesystem);
					filesystem = new_filesystem;
					new_filesystem = NULL;
					Thread::wait_to_finish(thread);
					memdelete(thread);
					thread = NULL;
					_update_scan_actions();
					emit_signal("filesystem_changed");
					emit_signal("sources_changed", sources_changed.size() > 0);
					_queue_update_script_classes();
					first_scan = false;
				}
			}
		} break;
	}
}

bool EditorFileSystem::is_scanning() const {

	return scanning || scanning_changes;
}

float EditorFileSystem::get_scanning_progress() const {

	return scan_total;
}

EditorFileSystemDirectory *EditorFileSystem::get_filesystem() {

	return filesystem;
}

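// Serializes one directory into the cache file: a "::path::mtime" header line
// followed by one line per file, with fields separated by "::" and "<>"
// (the same format parsed back in _scan_filesystem()).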
void EditorFileSystem::_save_filesystem_cache(EditorFileSystemDirectory *p_dir, FileAccess *p_file) {

	if (!p_dir)
		return; //none
	p_file->store_line("::" + p_dir->get_path() + "::" + String::num(p_dir->modified_time));

	for (int i = 0; i < p_dir->files.size(); i++) {

		if (p_dir->files[i]->import_group_file != String()) {
			group_file_cache.insert(p_dir->files[i]->import_group_file);
		}
		String s = p_dir->files[i]->file + "::" + p_dir->files[i]->type + "::" + itos(p_dir->files[i]->modified_time) + "::" + itos(p_dir->files[i]->import_modified_time) + "::" + itos(p_dir->files[i]->import_valid) + "::" + p_dir->files[i]->import_group_file + "::" + p_dir->files[i]->script_class_name + "<>" + p_dir->files[i]->script_class_extends + "<>" + p_dir->files[i]->script_class_icon_path;
		s += "::";
		for (int j = 0; j < p_dir->files[i]->deps.size(); j++) {

			if (j > 0)
				s += "<>";
			s += p_dir->files[i]->deps[j];
		}

		p_file->store_line(s);
	}

	for (int i = 0; i < p_dir->subdirs.size(); i++) {

		_save_filesystem_cache(p_dir->subdirs[i], p_file);
	}
}

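// Resolves p_file to its directory and file index inside the tree, creating
// missing intermediate directory entries along the way; returns false when
// the file itself is not known.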
bool EditorFileSystem::_find_file(const String &p_file, EditorFileSystemDirectory **r_d, int &r_file_pos) const {
	//todo make faster

	if (!filesystem || scanning)
		return false;

	String f = ProjectSettings::get_singleton()->localize_path(p_file);

	if (!f.begins_with("res://"))
		return false;
	f = f.substr(6, f.length());
	f = f.replace("\\", "/");

	Vector<String> path = f.split("/");

	if (path.size() == 0)
		return false;
	String file = path[path.size() - 1];
	path.resize(path.size() - 1);

	EditorFileSystemDirectory *fs = filesystem;

	for (int i = 0; i < path.size(); i++) {

		if (path[i].begins_with("."))
			return false;

		int idx = -1;
		for (int j = 0; j < fs->get_subdir_count(); j++) {

			if (fs->get_subdir(j)->get_name() == path[i]) {
				idx = j;
				break;
			}
		}

		if (idx == -1) {
			//does not exist, create i guess?
			EditorFileSystemDirectory *efsd = memnew(EditorFileSystemDirectory);

			efsd->name = path[i];
			efsd->parent = fs;

			int idx2 = 0;
			for (int j = 0; j < fs->get_subdir_count(); j++) {

				if (efsd->name < fs->get_subdir(j)->get_name())
					break;
				idx2++;
			}

			if (idx2 == fs->get_subdir_count())
				fs->subdirs.push_back(efsd);
			else
				fs->subdirs.insert(idx2, efsd);
			fs = efsd;
		} else {

			fs = fs->get_subdir(idx);
		}
	}

	int cpos = -1;
	for (int i = 0; i < fs->files.size(); i++) {

		if (fs->files[i]->file == file) {
			cpos = i;
			break;
		}
	}

	r_file_pos = cpos;
	*r_d = fs;

	return cpos != -1;
}

String EditorFileSystem::get_file_type(const String &p_file) const {

	EditorFileSystemDirectory *fs = NULL;
	int cpos = -1;

	if (!_find_file(p_file, &fs, cpos)) {

		return "";
	}

	return fs->files[cpos]->type;
}

EditorFileSystemDirectory *EditorFileSystem::find_file(const String &p_file, int *r_index) const {

	if (!filesystem || scanning)
		return NULL;

	EditorFileSystemDirectory *fs = NULL;
	int cpos = -1;
	if (!_find_file(p_file, &fs, cpos)) {

		return NULL;
	}

	if (r_index)
		*r_index = cpos;

	return fs;
}

EditorFileSystemDirectory *EditorFileSystem::get_filesystem_path(const String &p_path) {

	if (!filesystem || scanning)
		return NULL;

	String f = ProjectSettings::get_singleton()->localize_path(p_path);

	if (!f.begins_with("res://"))
		return NULL;

	f = f.substr(6, f.length());
	f = f.replace("\\", "/");
	if (f == String())
		return filesystem;

	if (f.ends_with("/"))
		f = f.substr(0, f.length() - 1);

	Vector<String> path = f.split("/");

	if (path.size() == 0)
		return NULL;

	EditorFileSystemDirectory *fs = filesystem;

	for (int i = 0; i < path.size(); i++) {

		int idx = -1;
		for (int j = 0; j < fs->get_subdir_count(); j++) {

			if (fs->get_subdir(j)->get_name() == path[i]) {
				idx = j;
				break;
			}
		}

		if (idx == -1) {
			return NULL;
		} else {

			fs = fs->get_subdir(idx);
		}
	}

	return fs;
}

void EditorFileSystem : : _save_late_updated_files ( ) {
//files that already existed, and were modified, need re-scanning for dependencies upon project restart. This is done via saving this special file
2018-07-16 00:29:00 +02:00
String fscache = EditorSettings : : get_singleton ( ) - > get_project_settings_dir ( ) . plus_file ( " filesystem_update4 " ) ;
2017-08-17 22:02:43 +02:00
FileAccessRef f = FileAccess : : open ( fscache , FileAccess : : WRITE ) ;
for ( Set < String > : : Element * E = late_update_files . front ( ) ; E ; E = E - > next ( ) ) {
f - > store_line ( E - > get ( ) ) ;
}
}
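
// Collects the resource dependencies reported by ResourceLoader for a file and
// returns them as a Vector<String> for storage in the file's cache entry.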
Vector<String> EditorFileSystem::_get_dependencies(const String &p_path) {

	List<String> deps;
	ResourceLoader::get_dependencies(p_path, &deps);

	Vector<String> ret;
	for (List<String>::Element *E = deps.front(); E; E = E->next()) {
		ret.push_back(E->get());
	}

	return ret;
}
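
// Asks each registered script language whether it exposes p_type as a global script
// class; if so, returns the class name and fills in its base class and icon path.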
String EditorFileSystem::_get_global_script_class(const String &p_type, const String &p_path, String *r_extends, String *r_icon_path) const {

	for (int i = 0; i < ScriptServer::get_language_count(); i++) {
		if (ScriptServer::get_language(i)->handles_global_class_type(p_type)) {
			String global_name;
			String extends;
			String icon_path;

			global_name = ScriptServer::get_language(i)->get_global_class_name(p_path, &extends, &icon_path);
			*r_extends = extends;
			*r_icon_path = icon_path;
			return global_name;
		}
	}
	*r_extends = String();
	*r_icon_path = String();
	return String();
}
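
// Walks the filesystem tree and registers every file that declares a global script
// class with the ScriptServer, along with its base class, language and icon path.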
void EditorFileSystem::_scan_script_classes(EditorFileSystemDirectory *p_dir) {
	int filecount = p_dir->files.size();
	const EditorFileSystemDirectory::FileInfo *const *files = p_dir->files.ptr();
	for (int i = 0; i < filecount; i++) {
		if (files[i]->script_class_name == String()) {
			continue;
		}

		String lang;
		for (int j = 0; j < ScriptServer::get_language_count(); j++) {
			if (ScriptServer::get_language(j)->handles_global_class_type(files[i]->type)) {
				lang = ScriptServer::get_language(j)->get_name();
			}
		}
		ScriptServer::add_global_class(files[i]->script_class_name, files[i]->script_class_extends, lang, p_dir->get_file_path(i));
		EditorNode::get_editor_data().script_class_set_icon_path(files[i]->script_class_name, files[i]->script_class_icon_path);
		EditorNode::get_editor_data().script_class_set_name(files[i]->file, files[i]->script_class_name);
	}
	for (int i = 0; i < p_dir->get_subdir_count(); i++) {
		_scan_script_classes(p_dir->get_subdir(i));
	}
}
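
// Rebuilds the global script class list from the filesystem tree. Deferred through
// _queue_update_script_classes() so that several file updates trigger only one rebuild.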
void EditorFileSystem::update_script_classes() {

	if (!update_script_classes_queued)
		return;

	update_script_classes_queued = false;
	ScriptServer::global_classes_clear();
	if (get_filesystem()) {
		_scan_script_classes(get_filesystem());
	}

	ScriptServer::save_global_classes();
	EditorNode::get_editor_data().script_class_save_icon_paths();

	// Rescan custom loaders and savers.
	// The `filesystem_changed` signal can fire multiple times without being followed by a
	// script class update, so custom loaders and savers are refreshed here instead, when
	// the script classes really get updated.
	ResourceLoader::remove_custom_loaders();
	ResourceLoader::add_custom_loaders();
	ResourceSaver::remove_custom_savers();
	ResourceSaver::add_custom_savers();
}

void EditorFileSystem::_queue_update_script_classes() {
	if (update_script_classes_queued) {
		return;
	}

	update_script_classes_queued = true;
	call_deferred("update_script_classes");
}
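
// Refreshes the cached information for a single file: its type, script class, import
// group, modification time, dependencies and import validity. Also handles files that
// were added or removed outside of a full scan.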
void EditorFileSystem::update_file(const String &p_file) {

	EditorFileSystemDirectory *fs = NULL;
	int cpos = -1;

	if (!_find_file(p_file, &fs, cpos)) {

		if (!fs)
			return;
	}

	if (!FileAccess::exists(p_file)) {
		// The file was removed.
		_delete_internal_files(p_file);
		if (cpos != -1) { // It might have never been part of the editor file system (e.g. *.* files deleted in the Open dialog).
			memdelete(fs->files[cpos]);
			fs->files.remove(cpos);
		}

		call_deferred("emit_signal", "filesystem_changed"); //update later
		_queue_update_script_classes();
		return;
	}

	String type = ResourceLoader::get_resource_type(p_file);

	if (cpos == -1) {

		// The file did not exist before, so it was added. Remember that, so it gets
		// scanned and imported on the next editor restart.
		late_added_files.insert(p_file);

		int idx = 0;

		for (int i = 0; i < fs->files.size(); i++) {
			if (p_file < fs->files[i]->file)
				break;
			idx++;
		}

		EditorFileSystemDirectory::FileInfo *fi = memnew(EditorFileSystemDirectory::FileInfo);
		fi->file = p_file.get_file();
		fi->import_modified_time = 0;
		fi->import_valid = ResourceLoader::is_import_valid(p_file);

		if (idx == fs->files.size()) {
			fs->files.push_back(fi);
		} else {

			fs->files.insert(idx, fi);
		}
		cpos = idx;
	} else {

		// The file exists and was updated, but was not added in this step.
		// Force it to be scanned again on the next restart, to get its proper type and dependencies.
		late_update_files.insert(p_file);
		_save_late_updated_files(); //files need to be updated in the re-scan
	}

	fs->files[cpos]->type = type;
	fs->files[cpos]->script_class_name = _get_global_script_class(type, p_file, &fs->files[cpos]->script_class_extends, &fs->files[cpos]->script_class_icon_path);
	fs->files[cpos]->import_group_file = ResourceLoader::get_import_group_file(p_file);
	fs->files[cpos]->modified_time = FileAccess::get_modified_time(p_file);
	fs->files[cpos]->deps = _get_dependencies(p_file);
	fs->files[cpos]->import_valid = ResourceLoader::is_import_valid(p_file);

	// Update the preview.
	EditorResourcePreview::get_singleton()->check_for_invalidation(p_file);

	call_deferred("emit_signal", "filesystem_changed"); //update later
	_queue_update_script_classes();
}
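
// Re-imports all source files that share a single group file: gathers each file's
// import options from its .import file, runs the group importer once over the whole
// set, then rewrites every file's .import and .md5 metadata and updates its cache entry.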
Error EditorFileSystem::_reimport_group(const String &p_group_file, const Vector<String> &p_files) {

	String importer_name;

	Map<String, Map<StringName, Variant> > source_file_options;
	Map<String, String> base_paths;
	for (int i = 0; i < p_files.size(); i++) {

		Ref<ConfigFile> config;
		config.instance();
		Error err = config->load(p_files[i] + ".import");
		ERR_CONTINUE(err != OK);
		ERR_CONTINUE(!config->has_section_key("remap", "importer"));
		String file_importer_name = config->get_value("remap", "importer");
		ERR_CONTINUE(file_importer_name == String());

		if (importer_name != String() && importer_name != file_importer_name) {
			print_line("one importer: " + importer_name + " the other: " + file_importer_name);
			EditorNode::get_singleton()->show_warning(vformat(TTR("There are multiple importers for different types pointing to file %s, import aborted"), p_group_file));
			ERR_FAIL_V(ERR_FILE_CORRUPT);
		}

		source_file_options[p_files[i]] = Map<StringName, Variant>();
		importer_name = file_importer_name;

		Ref<ResourceImporter> importer = ResourceFormatImporter::get_singleton()->get_importer_by_name(importer_name);
		ERR_FAIL_COND_V(!importer.is_valid(), ERR_FILE_CORRUPT);
		List<ResourceImporter::ImportOption> options;
		importer->get_import_options(&options);
		//set default values
		for (List<ResourceImporter::ImportOption>::Element *E = options.front(); E; E = E->next()) {
			source_file_options[p_files[i]][E->get().option.name] = E->get().default_value;
		}

		if (config->has_section("params")) {
			List<String> sk;
			config->get_section_keys("params", &sk);
			for (List<String>::Element *E = sk.front(); E; E = E->next()) {
				String param = E->get();
				Variant value = config->get_value("params", param);
				//override with whatever is in the file
				source_file_options[p_files[i]][param] = value;
			}
		}

		base_paths[p_files[i]] = ResourceFormatImporter::get_singleton()->get_import_base_path(p_files[i]);
	}

	ERR_FAIL_COND_V(importer_name == String(), ERR_UNCONFIGURED);

	Ref<ResourceImporter> importer = ResourceFormatImporter::get_singleton()->get_importer_by_name(importer_name);

	Error err = importer->import_group_file(p_group_file, source_file_options, base_paths);

	//all went well, overwrite config files with proper remaps and md5s
	for (Map<String, Map<StringName, Variant> >::Element *E = source_file_options.front(); E; E = E->next()) {

		const String &file = E->key();
		String base_path = ResourceFormatImporter::get_singleton()->get_import_base_path(file);
		FileAccessRef f = FileAccess::open(file + ".import", FileAccess::WRITE);
		ERR_FAIL_COND_V(!f, ERR_FILE_CANT_OPEN);

		//write manually, as order matters ([remap] has to go first for performance).
		f->store_line("[remap]");
		f->store_line("");
		f->store_line("importer=\"" + importer->get_importer_name() + "\"");
		if (importer->get_resource_type() != "") {
			f->store_line("type=\"" + importer->get_resource_type() + "\"");
		}

		Vector<String> dest_paths;

		if (err == OK) {
			String path = base_path + "." + importer->get_save_extension();
			f->store_line("path=\"" + path + "\"");
			dest_paths.push_back(path);
		}

		f->store_line("group_file=" + Variant(p_group_file).get_construct_string());

		if (err == OK) {
			f->store_line("valid=true");
		} else {
			f->store_line("valid=false");
		}

		f->store_line("[deps]\n");

		f->store_line("");

		f->store_line("source_file=" + Variant(file).get_construct_string());
		if (dest_paths.size()) {
			Array dp;
			for (int i = 0; i < dest_paths.size(); i++) {
				dp.push_back(dest_paths[i]);
			}
			f->store_line("dest_files=" + Variant(dp).get_construct_string() + "\n");
		}
		f->store_line("[params]");
		f->store_line("");

		//store options in provided order, to avoid the file changing. Order is also important because first match is accepted first.

		List<ResourceImporter::ImportOption> options;
		importer->get_import_options(&options);
		//set default values
		for (List<ResourceImporter::ImportOption>::Element *F = options.front(); F; F = F->next()) {

			String base = F->get().option.name;
			Variant v = F->get().default_value;
			if (source_file_options[file].has(base)) {
				v = source_file_options[file][base];
			}
			String value;
			VariantWriter::write_to_string(v, value);
			f->store_line(base + "=" + value);
		}

		f->close();

		// Store the md5's of the various files. These are stored separately so that the .import files can be version controlled.
		FileAccessRef md5s = FileAccess::open(base_path + ".md5", FileAccess::WRITE);
		ERR_FAIL_COND_V(!md5s, ERR_FILE_CANT_OPEN);

		md5s->store_line("source_md5=\"" + FileAccess::get_md5(file) + "\"");
		if (dest_paths.size()) {
			md5s->store_line("dest_md5=\"" + FileAccess::get_multiple_md5(dest_paths) + "\"\n");
		}
		md5s->close();

		EditorFileSystemDirectory *fs = NULL;
		int cpos = -1;
		bool found = _find_file(file, &fs, cpos);
		ERR_FAIL_COND_V(!found, ERR_UNCONFIGURED);

		//update modified times, to avoid reimport
		fs->files[cpos]->modified_time = FileAccess::get_modified_time(file);
		fs->files[cpos]->import_modified_time = FileAccess::get_modified_time(file + ".import");
		fs->files[cpos]->deps = _get_dependencies(file);
		fs->files[cpos]->type = importer->get_resource_type();
		fs->files[cpos]->import_valid = err == OK;

		//if the file is currently loaded, the source it was loaded from may have changed,
		//so its import path must be updated for it to reload properly
		if (ResourceCache::has(file)) {
			Resource *r = ResourceCache::get(file);

			if (r->get_import_path() != String()) {

				String dst_path = ResourceFormatImporter::get_singleton()->get_internal_resource_path(file);
				r->set_import_path(dst_path);
				r->set_import_last_modified_time(0);
			}
		}

		EditorResourcePreview::get_singleton()->check_for_invalidation(file);
	}

	return err;
}
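
// Re-imports a single file: resolves the importer and import options (from the
// existing .import file, importer defaults, or built-in defaults), runs the import,
// and rewrites the .import and .md5 metadata next to the source file.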
void EditorFileSystem::_reimport_file(const String &p_file) {

	EditorFileSystemDirectory *fs = NULL;
	int cpos = -1;
	bool found = _find_file(p_file, &fs, cpos);
	ERR_FAIL_COND(!found);

	//try to obtain existing params

	Map<StringName, Variant> params;
	String importer_name;

	if (FileAccess::exists(p_file + ".import")) {
		//use existing
		Ref<ConfigFile> cf;
		cf.instance();
		Error err = cf->load(p_file + ".import");
		if (err == OK) {
			if (cf->has_section("params")) {
				List<String> sk;
				cf->get_section_keys("params", &sk);
				for (List<String>::Element *E = sk.front(); E; E = E->next()) {
					params[E->get()] = cf->get_value("params", E->get());
				}
			}
			if (cf->has_section("remap")) {
				importer_name = cf->get_value("remap", "importer");
			}
		}

	} else {
		late_added_files.insert(p_file); //imported files do not call update_file(), but just in case...
	}

	Ref<ResourceImporter> importer;
	bool load_default = false;
	//find the importer
	if (importer_name != "") {
		importer = ResourceFormatImporter::get_singleton()->get_importer_by_name(importer_name);
	}

	if (importer.is_null()) {
		//not found by name, find by extension
		importer = ResourceFormatImporter::get_singleton()->get_importer_by_extension(p_file.get_extension());
		load_default = true;
		if (importer.is_null()) {
			ERR_PRINT("BUG: File queued for import, but can't be imported!");
			ERR_FAIL();
		}
	}

	//mix with default params, in case a parameter is missing
	List<ResourceImporter::ImportOption> opts;
	importer->get_import_options(&opts);

	for (List<ResourceImporter::ImportOption>::Element *E = opts.front(); E; E = E->next()) {
		if (!params.has(E->get().option.name)) { //this one is not present
			params[E->get().option.name] = E->get().default_value;
		}
	}

	if (load_default && ProjectSettings::get_singleton()->has_setting("importer_defaults/" + importer->get_importer_name())) {
		//use the project-wide importer defaults if they exist
		Dictionary d = ProjectSettings::get_singleton()->get("importer_defaults/" + importer->get_importer_name());
		List<Variant> v;
		d.get_key_list(&v);
		for (List<Variant>::Element *E = v.front(); E; E = E->next()) {
			params[E->get()] = d[E->get()];
		}
	}

	//finally, perform the import
	String base_path = ResourceFormatImporter::get_singleton()->get_import_base_path(p_file);

	List<String> import_variants;
	List<String> gen_files;
	Variant metadata;
	Error err = importer->import(p_file, base_path, params, &import_variants, &gen_files, &metadata);

	if (err != OK) {
		ERR_PRINTS("Error importing: " + p_file);
	}

	//as import is complete, save the .import file

	FileAccess *f = FileAccess::open(p_file + ".import", FileAccess::WRITE);
	ERR_FAIL_COND(!f);

	//write manually, as order matters ([remap] has to go first for performance).
	f->store_line("[remap]");
	f->store_line("");
	f->store_line("importer=\"" + importer->get_importer_name() + "\"");
	if (importer->get_resource_type() != "") {
		f->store_line("type=\"" + importer->get_resource_type() + "\"");
	}

	Vector<String> dest_paths;

	if (err == OK) {

		if (importer->get_save_extension() == "") {
			//no path
		} else if (import_variants.size()) {
			//import with variants
			for (List<String>::Element *E = import_variants.front(); E; E = E->next()) {

				String path = base_path.c_escape() + "." + E->get() + "." + importer->get_save_extension();

				f->store_line("path." + E->get() + "=\"" + path + "\"");
				dest_paths.push_back(path);
			}
		} else {

			String path = base_path + "." + importer->get_save_extension();
			f->store_line("path=\"" + path + "\"");
			dest_paths.push_back(path);
		}

	} else {

		f->store_line("valid=false");
	}

	if (metadata != Variant()) {
		f->store_line("metadata=" + metadata.get_construct_string());
	}

	f->store_line("");

	f->store_line("[deps]\n");

	if (gen_files.size()) {
		Array genf;
		for (List<String>::Element *E = gen_files.front(); E; E = E->next()) {
			genf.push_back(E->get());
			dest_paths.push_back(E->get());
		}

		String value;
		VariantWriter::write_to_string(genf, value);
		f->store_line("files=" + value);
		f->store_line("");
	}

	f->store_line("source_file=" + Variant(p_file).get_construct_string());

	if (dest_paths.size()) {
		Array dp;
		for (int i = 0; i < dest_paths.size(); i++) {
			dp.push_back(dest_paths[i]);
		}
		f->store_line("dest_files=" + Variant(dp).get_construct_string() + "\n");
	}

	f->store_line("[params]");
	f->store_line("");

	//store options in provided order, to avoid the file changing. Order is also important because first match is accepted first.

	for (List<ResourceImporter::ImportOption>::Element *E = opts.front(); E; E = E->next()) {

		String base = E->get().option.name;
		String value;
		VariantWriter::write_to_string(params[base], value);
		f->store_line(base + "=" + value);
	}

	f->close();
	memdelete(f);

	// Store the md5's of the various files. These are stored separately so that the .import files can be version controlled.
	FileAccess *md5s = FileAccess::open(base_path + ".md5", FileAccess::WRITE);
	ERR_FAIL_COND(!md5s);
	md5s->store_line("source_md5=\"" + FileAccess::get_md5(p_file) + "\"");
	if (dest_paths.size()) {
		md5s->store_line("dest_md5=\"" + FileAccess::get_multiple_md5(dest_paths) + "\"\n");
	}
	md5s->close();
	memdelete(md5s);

	//update modified times, to avoid reimport
	fs->files[cpos]->modified_time = FileAccess::get_modified_time(p_file);
	fs->files[cpos]->import_modified_time = FileAccess::get_modified_time(p_file + ".import");
	fs->files[cpos]->deps = _get_dependencies(p_file);
	fs->files[cpos]->type = importer->get_resource_type();
	fs->files[cpos]->import_valid = ResourceLoader::is_import_valid(p_file);

	//if the file is currently loaded, the source it was loaded from may have changed,
	//so its import path must be updated for it to reload properly
	if (ResourceCache::has(p_file)) {
		Resource *r = ResourceCache::get(p_file);

		if (r->get_import_path() != String()) {

			String dst_path = ResourceFormatImporter::get_singleton()->get_internal_resource_path(p_file);
			r->set_import_path(dst_path);
			r->set_import_last_modified_time(0);
		}
	}

	EditorResourcePreview::get_singleton()->check_for_invalidation(p_file);
}
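
// Recursively collects, for every group that needs reimporting, the list of source
// files in the tree that belong to that group.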
void EditorFileSystem::_find_group_files(EditorFileSystemDirectory *efd, Map<String, Vector<String> > &group_files, Set<String> &groups_to_reimport) {

	int fc = efd->files.size();
	const EditorFileSystemDirectory::FileInfo *const *files = efd->files.ptr();
	for (int i = 0; i < fc; i++) {
		if (groups_to_reimport.has(files[i]->import_group_file)) {
			if (!group_files.has(files[i]->import_group_file)) {
				group_files[files[i]->import_group_file] = Vector<String>();
			}
			group_files[files[i]->import_group_file].push_back(efd->get_file_path(i));
		}
	}

	for (int i = 0; i < efd->get_subdir_count(); i++) {
		_find_group_files(efd->get_subdir(i), group_files, groups_to_reimport);
	}
}
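
// Re-imports the given list of files: regular files are imported individually in
// importer-defined order, while files that belong to an import group are collected
// and re-imported together through _reimport_group().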
void EditorFileSystem::reimport_files(const Vector<String> &p_files) {

	{ //check that the .import folder exists
		DirAccess *da = DirAccess::open("res://");
		if (da->change_dir(".import") != OK) {
			Error err = da->make_dir(".import");
			if (err) {
				memdelete(da);
				ERR_EXPLAIN("Failed to create 'res://.import' folder.");
				ERR_FAIL();
			}
		}
		memdelete(da);
	}

	importing = true;
	EditorProgress pr("reimport", TTR("(Re)Importing Assets"), p_files.size());

	Vector<ImportFile> files;
	Set<String> groups_to_reimport;

	for (int i = 0; i < p_files.size(); i++) {

		String group_file = ResourceFormatImporter::get_singleton()->get_import_group_file(p_files[i]);

		if (group_file_cache.has(p_files[i])) {
			//maybe the file itself is a group!
			groups_to_reimport.insert(p_files[i]);
			//groups do not belong to groups
			group_file = String();
		} else if (group_file != String()) {
			//it's a group file, add the group to import and skip this file
			groups_to_reimport.insert(group_file);
		} else {
			//it's a regular file
			ImportFile ifile;
			ifile.path = p_files[i];
			ifile.order = ResourceFormatImporter::get_singleton()->get_import_order(p_files[i]);
			files.push_back(ifile);
		}

		//the group may have changed, so also update the group reference
		EditorFileSystemDirectory *fs = NULL;
		int cpos = -1;
		if (_find_file(p_files[i], &fs, cpos)) {
			fs->files.write[cpos]->import_group_file = group_file;
		}
	}

	files.sort();

	for (int i = 0; i < files.size(); i++) {
		pr.step(files[i].path.get_file(), i);
		_reimport_file(files[i].path);
	}

	//reimport groups

	if (groups_to_reimport.size()) {
		Map<String, Vector<String> > group_files;
		_find_group_files(filesystem, group_files, groups_to_reimport);
		for (Map<String, Vector<String> >::Element *E = group_files.front(); E; E = E->next()) {

			Error err = _reimport_group(E->key(), E->get());
			if (err == OK) {
				_reimport_file(E->key());
			}
		}
	}

	_save_filesystem_cache();
	importing = false;
	if (!is_scanning()) {
		emit_signal("filesystem_changed");
	}

	emit_signal("resources_reimported", p_files);
}
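
// Hook installed as ResourceLoader::import in the constructor, so a resource gets
// updated and (re)imported when the loader requires it.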
Error EditorFileSystem::_resource_import(const String &p_path) {

	Vector<String> files;
	files.push_back(p_path);

	singleton->update_file(p_path);
	singleton->reimport_files(files);

	return OK;
}
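
// Group-file helpers: is_group_file() checks the cache of known group files, while
// move_group_file() updates every .import file that references a group file after
// that group file has been moved to a new location.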
bool EditorFileSystem::is_group_file(const String &p_path) const {
	return group_file_cache.has(p_path);
}

void EditorFileSystem::_move_group_files(EditorFileSystemDirectory *efd, const String &p_group_file, const String &p_new_location) {

	int fc = efd->files.size();
	EditorFileSystemDirectory::FileInfo *const *files = efd->files.ptrw();
	for (int i = 0; i < fc; i++) {

		if (files[i]->import_group_file == p_group_file) {

			files[i]->import_group_file = p_new_location;

			Ref<ConfigFile> config;
			config.instance();
			String path = efd->get_file_path(i) + ".import";
			Error err = config->load(path);
			if (err != OK) {
				continue;
			}
			if (config->has_section_key("remap", "group_file")) {

				config->set_value("remap", "group_file", p_new_location);
			}

			List<String> sk;
			config->get_section_keys("params", &sk);
			for (List<String>::Element *E = sk.front(); E; E = E->next()) {
				//not very clean, but should work
				String param = E->get();
				String value = config->get_value("params", param);
				if (value == p_group_file) {
					config->set_value("params", param, p_new_location);
				}
			}

			config->save(path);
		}
	}

	for (int i = 0; i < efd->get_subdir_count(); i++) {
		_move_group_files(efd->get_subdir(i), p_group_file, p_new_location);
	}
}

void EditorFileSystem::move_group_file(const String &p_path, const String &p_new_path) {

	if (get_filesystem()) {
		_move_group_files(get_filesystem(), p_path, p_new_path);
		if (group_file_cache.has(p_path)) {
			group_file_cache.erase(p_path);
			group_file_cache.insert(p_new_path);
		}
	}
}

void EditorFileSystem::_bind_methods() {

	ClassDB::bind_method(D_METHOD("get_filesystem"), &EditorFileSystem::get_filesystem);
	ClassDB::bind_method(D_METHOD("is_scanning"), &EditorFileSystem::is_scanning);
	ClassDB::bind_method(D_METHOD("get_scanning_progress"), &EditorFileSystem::get_scanning_progress);
	ClassDB::bind_method(D_METHOD("scan"), &EditorFileSystem::scan);
	ClassDB::bind_method(D_METHOD("scan_sources"), &EditorFileSystem::scan_changes);
	ClassDB::bind_method(D_METHOD("update_file", "path"), &EditorFileSystem::update_file);
	ClassDB::bind_method(D_METHOD("get_filesystem_path", "path"), &EditorFileSystem::get_filesystem_path);
	ClassDB::bind_method(D_METHOD("get_file_type", "path"), &EditorFileSystem::get_file_type);
	ClassDB::bind_method(D_METHOD("update_script_classes"), &EditorFileSystem::update_script_classes);

	ADD_SIGNAL(MethodInfo("filesystem_changed"));
	ADD_SIGNAL(MethodInfo("sources_changed", PropertyInfo(Variant::BOOL, "exist")));
	ADD_SIGNAL(MethodInfo("resources_reimported", PropertyInfo(Variant::POOL_STRING_ARRAY, "resources")));
	ADD_SIGNAL(MethodInfo("resources_reload", PropertyInfo(Variant::POOL_STRING_ARRAY, "resources")));
}

void EditorFileSystem::_update_extensions() {

	valid_extensions.clear();
	import_extensions.clear();

	List<String> extensionsl;
	ResourceLoader::get_recognized_extensions_for_type("", &extensionsl);
	for (List<String>::Element *E = extensionsl.front(); E; E = E->next()) {

		valid_extensions.insert(E->get());
	}

	extensionsl.clear();
	ResourceFormatImporter::get_singleton()->get_recognized_extensions(&extensionsl);
	for (List<String>::Element *E = extensionsl.front(); E; E = E->next()) {

		import_extensions.insert(E->get());
	}
}
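
// The constructor registers this instance as the singleton, installs the import hook
// used by ResourceLoader, prepares the res://.import folder, and initializes the
// scanning/importing state flags.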
EditorFileSystem::EditorFileSystem() {

	ResourceLoader::import = _resource_import;
	reimport_on_missing_imported_files = GLOBAL_DEF("editor/reimport_missing_imported_files", true);

	singleton = this;
	filesystem = memnew(EditorFileSystemDirectory); //start with an empty root directory
	filesystem->parent = NULL;

	thread = NULL;
	scanning = false;
	importing = false;
	use_threads = true;
	thread_sources = NULL;
	new_filesystem = NULL;

	abort_scan = false;
	scanning_changes = false;
	scanning_changes_done = false;

	DirAccess *da = DirAccess::create(DirAccess::ACCESS_RESOURCES);
	if (da->change_dir("res://.import") != OK) {
		da->make_dir("res://.import");
	}
	// This should probably also work on Unix and use the string it returns for FAT32 or exFAT
	using_fat32_or_exfat = (da->get_filesystem_type() == "FAT32" || da->get_filesystem_type() == "exFAT");
	memdelete(da);

	scan_total = 0;
	update_script_classes_queued = false;
	first_scan = true;
	revalidate_import_files = false;
}

EditorFileSystem::~EditorFileSystem() {
}