Merge pull request #89472 from dalexeev/gds-fix-bin-tokenizer-continuation-lines
GDScript: Fix continuation lines in `GDScriptTokenizerBuffer`
Commit 684e7aa37a
5 changed files with 35 additions and 11 deletions
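Context, not part of the original commit message: as I understand it, `GDScriptTokenizerBuffer` is the tokenizer variant used when scripts are compiled to binary tokens (the path the test runner exercises with `--use-binary-tokens`). The text tokenizer already skips whitespace and comment lines that follow a `\` line continuation (see GH-89403), but the binary path still assumed a continued statement resumes on the very next physical line. A minimal GDScript sketch of the construct involved (illustrative, not taken from the PR):

    func test():
        var x := 1 \
                # Comment lines may follow the backslash...
                + 2  # ...so the statement resumes here, two lines below the backslash.
        print(x)  # Prints 3.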
@@ -1455,10 +1455,11 @@ GDScriptTokenizer::Token GDScriptTokenizerText::scan() {
 		if (_peek() != '\n') {
 			return make_error("Expected new line after \"\\\".");
 		}
 		continuation_lines.push_back(line);
 		_advance();
 		newline(false);
 		line_continuation = true;
 		_skip_whitespace(); // Skip whitespace/comment lines after `\`. See GH-89403.
+		continuation_lines.push_back(line);
 		return scan(); // Recurse to get next token.
 	}
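As I read this hunk, the added `continuation_lines.push_back(line)` after `_skip_whitespace()` records the line on which the statement actually resumes, which can be more than one line past the backslash when comment lines sit in between. A hedged GDScript illustration (hypothetical example):

    func sum() -> int:
        return 1 \
                # The tokenizer skips this comment line
                # and this one,
                + 2  # ...then resumes the statement here.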
@@ -285,9 +285,9 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code,
 
 	// Remove continuation lines from map.
 	for (int line : tokenizer.get_continuation_lines()) {
-		if (rev_token_lines.has(line + 1)) {
-			token_lines.erase(rev_token_lines[line + 1]);
-			token_columns.erase(rev_token_lines[line + 1]);
+		if (rev_token_lines.has(line)) {
+			token_lines.erase(rev_token_lines[line]);
+			token_columns.erase(rev_token_lines[line]);
 		}
 	}
 
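My reading of this hunk: `token_lines` and `token_columns` map token indices to source positions in the compiled buffer, and entries for continuation lines are removed so that a statement split with `\` keeps the position of the line where it starts. The old code erased the entry for `line + 1`, the physical line right after the recorded one; the new code erases the entry for the recorded line itself, matching the tokenizer change above. A short sketch of a statement whose continuation is not at `line + 1` (hypothetical):

    var area := 2 \
            # With this comment in between, the continuation line is not `line + 1`.
            * 3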
@@ -300,9 +300,17 @@ bool GDScriptTestRunner::make_tests_for_dir(const String &p_dir) {
 #endif
 
 				String out_file = next.get_basename() + ".out";
-				if (!is_generating && !dir->file_exists(out_file)) {
-					ERR_FAIL_V_MSG(false, "Could not find output file for " + next);
-				}
+				ERR_FAIL_COND_V_MSG(!is_generating && !dir->file_exists(out_file), false, "Could not find output file for " + next);
+
+				if (next.ends_with(".bin.gd")) {
+					// Test text mode first.
+					GDScriptTest text_test(current_dir.path_join(next), current_dir.path_join(out_file), source_dir);
+					tests.push_back(text_test);
+					// Test binary mode even without `--use-binary-tokens`.
+					GDScriptTest bin_test(current_dir.path_join(next), current_dir.path_join(out_file), source_dir);
+					bin_test.set_tokenizer_mode(GDScriptTest::TOKENIZER_BUFFER);
+					tests.push_back(bin_test);
+				} else {
 					GDScriptTest test(current_dir.path_join(next), current_dir.path_join(out_file), source_dir);
 					if (binary_tokens) {
 						test.set_tokenizer_mode(GDScriptTest::TOKENIZER_BUFFER);
@@ -310,6 +318,7 @@ bool GDScriptTestRunner::make_tests_for_dir(const String &p_dir) {
 					tests.push_back(test);
+				}
 			}
 		}
 
 		next = dir->get_next();
 	}
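A note on the test-runner change (my reading of the two hunks above): the old `if`/`ERR_FAIL_V_MSG` pair is collapsed into a single `ERR_FAIL_COND_V_MSG`, and scripts whose names end in `.bin.gd` are now registered twice, once with the default text tokenizer and once with `GDScriptTest::TOKENIZER_BUFFER`, so the binary tokenizer is exercised even when the runner is not launched with `--use-binary-tokens`. A sketch of such a test pair (file names are hypothetical):

    # continuation_lines.bin.gd (hypothetical name) -- picked up in both text and buffer modes.
    func test():
        print("hello")

The matching `continuation_lines.out` would then expect `GDTEST_OK` followed by `hello`.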
@@ -0,0 +1,12 @@
+# GH-89403
+
+func test():
+    var x := 1
+    if x == 0 \
+            # Comment.
+            # Comment.
+            and (x < 1 or x > 2) \
+            # Comment.
+            and x != 3:
+        pass
+    print("Ok")
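In this new test script (its path is not visible in this excerpt), `x` is `1`, so `x == 0` is false, the whole continued condition evaluates to false, the `pass` branch is skipped, and the script prints only `Ok` — which is exactly what the expected-output file below checks for after the `GDTEST_OK` status line.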
@@ -0,0 +1,2 @@
+GDTEST_OK
+Ok