/* * Copyright (c) 2022, Dex♪ * Copyright (c) 2023-2025, Tim Flynn * Copyright (c) 2023, Andreas Kling * Copyright (c) 2023-2024, Sam Atkins * Copyright (c) 2025, Jelle Raaijmakers * * SPDX-License-Identifier: BSD-2-Clause */ #include "Application.h" #include "Debug.h" #include "Display.h" #include "TestRunCapture.h" #include "TestWeb.h" #include "TestWebView.h" #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include namespace TestWeb { static Vector s_view_display_states; static Vector> s_view_run_next_test; static RefPtr> s_all_tests_complete; static Vector s_skipped_tests; static Vector s_loaded_from_http_server; static HashMap s_current_test_index_by_view; struct TestRunContext { Vector& tests; size_t& tests_remaining; size_t& total_tests; }; static TestRunContext* s_run_context { nullptr }; Vector& view_states() { return s_view_display_states; } size_t total_tests() { return s_run_context ? 
s_run_context->total_tests : 0;
}

// Computes the per-test output base path inside the results directory (no file
// extension), creating any missing parent directories along the way.
// NOTE(review): template arguments in this file were stripped by extraction;
// the ErrorOr<...> payloads below are reconstructed from the return
// expressions and call sites — confirm against upstream.
static ErrorOr<ByteString> prepare_output_path(Test const& test)
{
    auto& app = Application::the();
    auto base_path = LexicalPath::join(app.results_directory, test.safe_relative_path);
    TRY(Core::Directory::create(base_path.dirname(), Core::Directory::CreateDirectories::Yes));
    return base_path.string();
}

// A test input is any file with a markup-like extension.
static bool is_valid_test_name(StringView test_name)
{
    auto valid_test_file_suffixes = { ".htm"sv, ".html"sv, ".svg"sv, ".xhtml"sv, ".xht"sv };
    return AK::any_of(valid_test_file_suffixes, [&](auto suffix) { return test_name.ends_with(suffix); });
}

// Resolves a TestConfig.ini entry to a real filesystem path, warning (but still
// propagating the error) when resolution fails.
static ErrorOr<ByteString> real_path_for_test_input(ByteString const& path)
{
    auto maybe_real_path = FileSystem::real_path(path);
    if (maybe_real_path.is_error())
        warnln("Unable to get real path for TestConfig.ini entry '{}': {}", path, maybe_real_path.error());
    return maybe_real_path;
}

// Recursively collects every valid test file beneath `directory` into `output`.
static ErrorOr<void> enumerate_test_files_recursively(StringView directory, Vector<ByteString>& output)
{
    Core::DirIterator it(directory, Core::DirIterator::Flags::SkipDots);
    while (it.has_next()) {
        auto full_path = it.next_full_path();
        if (FileSystem::is_directory(full_path)) {
            TRY(enumerate_test_files_recursively(full_path, output));
            continue;
        }
        if (!is_valid_test_name(full_path))
            continue;
        output.append(TRY(real_path_for_test_input(full_path)));
    }
    return {};
}

// Expands TestConfig.ini keys into concrete paths: a key with a trailing '/'
// means "every test under this directory"; otherwise it names a single file.
static ErrorOr<void> add_config_paths(StringView test_root_path, Vector<ByteString> const& keys, Vector<ByteString>& output)
{
    for (auto const& key : keys) {
        auto path = LexicalPath::join(test_root_path, key).string();
        if (key.ends_with('/')) {
            TRY(enumerate_test_files_recursively(path, output));
        } else {
            output.append(TRY(real_path_for_test_input(path)));
        }
    }
    return {};
}

// Loads TestConfig.ini from the test root; a missing config file is fine.
static ErrorOr<void> load_test_config(StringView test_root_path)
{
    auto config_path = LexicalPath::join(test_root_path, "TestConfig.ini"sv);
    auto config_or_error = Core::ConfigFile::open(config_path.string());
    if (config_or_error.is_error()) {
        if (config_or_error.error().code() == ENOENT)
            return {};
        warnln("Unable to open test config {}", config_path);
return config_or_error.release_error(); } auto config = config_or_error.release_value(); for (auto const& group : config->groups()) { if (group == "Skipped"sv) { TRY(add_config_paths(test_root_path, config->keys(group), s_skipped_tests)); } else if (group == "LoadFromHttpServer"sv) { TRY(add_config_paths(test_root_path, config->keys(group), s_loaded_from_http_server)); } else { warnln("Unknown group '{}' in config {}", group, config_path); } } return {}; } static ErrorOr collect_dump_tests(Application const& app, Vector& tests, StringView path, StringView trail, TestMode mode) { Core::DirIterator it(ByteString::formatted("{}/input/{}", path, trail), Core::DirIterator::Flags::SkipDots); while (it.has_next()) { auto name = it.next_path(); auto input_path = TRY(FileSystem::real_path(ByteString::formatted("{}/input/{}/{}", path, trail, name))); if (FileSystem::is_directory(input_path)) { TRY(collect_dump_tests(app, tests, path, ByteString::formatted("{}/{}", trail, name), mode)); continue; } if (!is_valid_test_name(name)) continue; auto expectation_path = ByteString::formatted("{}/expected/{}/{}.txt", path, trail, LexicalPath::title(name)); auto relative_path = LexicalPath::relative_path(input_path, app.test_root_path).release_value(); tests.append({ mode, input_path, move(expectation_path), relative_path, relative_path }); } return {}; } static ErrorOr collect_ref_tests(Application const& app, Vector& tests, StringView path, StringView trail) { Core::DirIterator it(ByteString::formatted("{}/input/{}", path, trail), Core::DirIterator::Flags::SkipDots); while (it.has_next()) { auto name = it.next_path(); auto input_path = TRY(FileSystem::real_path(ByteString::formatted("{}/input/{}/{}", path, trail, name))); if (FileSystem::is_directory(input_path)) { TRY(collect_ref_tests(app, tests, path, ByteString::formatted("{}/{}", trail, name))); continue; } if (!is_valid_test_name(name)) continue; auto relative_path = LexicalPath::relative_path(input_path, 
app.test_root_path).release_value(); tests.append({ TestMode::Ref, input_path, {}, relative_path, relative_path }); } return {}; } static ErrorOr collect_screenshot_tests(Application const& app, Vector& tests, StringView path, StringView trail) { Core::DirIterator it(ByteString::formatted("{}/input/{}", path, trail), Core::DirIterator::Flags::SkipDots); while (it.has_next()) { auto name = it.next_path(); auto input_path = TRY(FileSystem::real_path(ByteString::formatted("{}/input/{}/{}", path, trail, name))); if (FileSystem::is_directory(input_path)) { TRY(collect_screenshot_tests(app, tests, path, ByteString::formatted("{}/{}", trail, name))); continue; } if (!is_valid_test_name(name)) continue; auto expectation_path = ByteString::formatted("{}/expected/{}/{}.png", path, trail, LexicalPath::title(name)); auto relative_path = LexicalPath::relative_path(input_path, app.test_root_path).release_value(); tests.append({ TestMode::Screenshot, input_path, move(expectation_path), relative_path, relative_path }); } return {}; } static ErrorOr collect_crash_tests(Application const& app, Vector& tests, StringView path, StringView trail) { Core::DirIterator it(ByteString::formatted("{}/{}", path, trail), Core::DirIterator::Flags::SkipDots); while (it.has_next()) { auto name = it.next_path(); auto input_path = TRY(FileSystem::real_path(ByteString::formatted("{}/{}/{}", path, trail, name))); if (FileSystem::is_directory(input_path)) { TRY(collect_crash_tests(app, tests, path, ByteString::formatted("{}/{}", trail, name))); continue; } if (!is_valid_test_name(name)) continue; auto relative_path = LexicalPath::relative_path(input_path, app.test_root_path).release_value(); tests.append({ TestMode::Crash, input_path, {}, relative_path, relative_path }); } return {}; } static String generate_wait_for_test_string(StringView wait_class, StringView on_finish_script = ""sv) { return MUST(String::formatted(R"( function hasTestWaitClass() {{ return 
document.documentElement.classList.contains('{}'); }} function afterFontsAndPaint(callback) {{ document.fonts.ready.then(() => requestAnimationFrame(() => requestAnimationFrame(callback))); }} if (!hasTestWaitClass()) {{ afterFontsAndPaint(() => internals.signalTestIsDone("PASS")); }} else {{ const observer = new MutationObserver(() => {{ if (!hasTestWaitClass()) {{ observer.disconnect(); internals.signalTestIsDone("PASS"); }} }}); observer.observe(document.documentElement, {{ attributes: true, attributeFilter: ['class'], }}); {} }} )"sv, wait_class, on_finish_script)); } static auto wait_for_test_completion = generate_wait_for_test_string("test-wait"sv); // https://web-platform-tests.org/writing-tests/reftests.html#waiting-for-content-to-render static auto wait_for_reftest_completion = generate_wait_for_test_string("reftest-wait"sv, R"( afterFontsAndPaint(() => document.documentElement.dispatchEvent(new Event("TestRendered", { bubbles: true }))); )"sv); static ErrorOr generate_result_files(ReadonlySpan tests, ReadonlySpan non_passing_tests) { auto& app = Application::the(); auto& display = Display::the(); bool const has_helper_logs = FileSystem::exists(LexicalPath::join(app.results_directory, "helper-process-logs.html"sv).string()); auto const generated_at = UnixDateTime::now(); // Write results.js (as JS to avoid fetch CORS issues with file://) StringBuilder js; js.append("const RESULTS_DATA = {\n"sv); js.appendff(" \"summary\": {{ \"total\": {}, \"fail\": {}, \"timeout\": {}, \"crashed\": {}, \"skipped\": {} }},\n", total_tests(), display.fail_count, display.timeout_count, display.crashed_count, display.skipped_count); js.appendff(" \"generatedAt\": {},\n", generated_at.seconds_since_epoch()); js.appendff(" \"invocationCommandLine\": {},\n", JsonValue(app.invocation_command_line).serialized()); js.appendff(" \"hasLogs\": {},\n", has_helper_logs ? 
"true" : "false"); js.append(" \"tests\": [\n"sv); bool first = true; for (auto const& result : non_passing_tests) { if (result.result == TestResult::Skipped && app.verbosity < Application::VERBOSITY_LEVEL_LOG_SKIPPED_TESTS) continue; if (!first) js.append(",\n"sv); first = false; auto const& test = tests[result.test_index]; auto base_path = TRY(prepare_output_path(test)); bool has_std_logs = FileSystem::exists(ByteString::formatted("{}.logs.html", base_path)); js.appendff(" {{ \"name\": \"{}\", \"result\": \"{}\", \"mode\": \"{}\", \"hasLogs\": {}", test.safe_relative_path, test_result_to_string(result.result), test_mode_to_string(test.mode), has_std_logs ? "true" : "false"); if ((test.mode == TestMode::Ref || test.mode == TestMode::Screenshot) && test.diff_pixel_error_count > 0) js.appendff(", \"pixelErrors\": {}, \"maxChannelDiff\": {}", test.diff_pixel_error_count, test.diff_maximum_error); js.append(" }"sv); } js.append("\n ]\n};\n"sv); auto js_path = LexicalPath::join(app.results_directory, "results.js"sv).string(); auto js_file = TRY(Core::File::open(js_path, Core::File::OpenMode::Write | Core::File::OpenMode::Truncate)); TRY(js_file->write_until_depleted(js.string_view().bytes())); // Copy index.html from source tree auto source_html_path = LexicalPath::join(app.test_root_path, "test-web/results-index.html"sv).string(); auto dest_html_path = LexicalPath::join(app.results_directory, "index.html"sv).string(); auto source_html = TRY(Core::File::open(source_html_path, Core::File::OpenMode::Read)); auto html_contents = TRY(source_html->read_until_eof()); auto dest_html = TRY(Core::File::open(dest_html_path, Core::File::OpenMode::Write | Core::File::OpenMode::Truncate)); TRY(dest_html->write_until_depleted(html_contents)); return {}; } static ErrorOr write_test_diff_to_results(Test const& test, ByteBuffer const& expectation) { auto base_path = TRY(prepare_output_path(test)); // Write expected output auto expected_path = ByteString::formatted("{}.expected.txt", 
base_path);
    auto expected_file = TRY(Core::File::open(expected_path, Core::File::OpenMode::Write));
    TRY(expected_file->write_until_depleted(expectation));

    // Write actual output
    auto actual_path = ByteString::formatted("{}.actual.txt", base_path);
    auto actual_file = TRY(Core::File::open(actual_path, Core::File::OpenMode::Write));
    TRY(actual_file->write_until_depleted(test.text.bytes()));

    // Write diff (plain text for tools)
    auto diff_path = ByteString::formatted("{}.diff.txt", base_path);
    auto diff_file = TRY(Core::File::open(diff_path, Core::File::OpenMode::Write));
    // 3 lines of context per hunk, matching conventional unified-diff output.
    auto hunks = TRY(Diff::from_text(expectation, test.text, 3));
    TRY(Diff::write_unified_header(test.expectation_path, test.expectation_path, *diff_file));
    for (auto const& hunk : hunks)
        TRY(Diff::write_unified(hunk, *diff_file, Diff::ColorOutput::No));

    // Write diff (colorized HTML for viewer)
    // NOTE(review): as extracted, this raw string contains only a newline; it
    // presumably once held the HTML document preamble for the diff viewer and
    // its markup was lost in extraction — verify against upstream.
    auto html_path = ByteString::formatted("{}.diff.html", base_path);
    auto html_file = TRY(Core::File::open(html_path, Core::File::OpenMode::Write));
    TRY(html_file->write_until_depleted(R"html(
)html"sv));

    // Write header
    // NOTE(review): the two write_until_depleted(""sv) calls below write empty
    // strings; they presumably once wrapped the ---/+++ header in HTML markup
    // that was lost in extraction — verify against upstream.
    TRY(html_file->write_until_depleted(""sv));
    TRY(html_file->write_formatted("--- {}\n", test.expectation_path));
    TRY(html_file->write_formatted("+++ {}\n", test.expectation_path));
    TRY(html_file->write_until_depleted(""sv));

    // Write hunks with colorization
    for (auto const& hunk : hunks) {
        // Emit the hunk's location line first.
        TRY(html_file->write_formatted("{}\n", hunk.location));

        for (auto const& line : hunk.lines) {
            // Escape each diff line so it renders literally in the HTML viewer.
            auto escaped = escape_html_entities(line.content);
            // Prefix with the conventional unified-diff marker: '+', '-', or ' '.
            switch (line.operation) {
            case Diff::Line::Operation::Addition:
                TRY(html_file->write_formatted("+{}\n", escaped));
                break;
            case Diff::Line::Operation::Removal:
                TRY(html_file->write_formatted("-{}\n", escaped));
                break;
            case Diff::Line::Operation::Context:
                TRY(html_file->write_formatted(" {}\n", escaped));
                break;
            }
        }
    }

    TRY(html_file->write_until_depleted("
"sv)); return {}; } static void expand_test_with_variants(TestRunContext& context, size_t base_test_index, ReadonlySpan variants) { VERIFY(!variants.is_empty()); context.tests.ensure_capacity(context.tests.size() + variants.size()); auto const& base_test = context.tests[base_test_index]; for (auto const& variant : variants) { Test variant_test; variant_test.mode = base_test.mode; variant_test.run_index = base_test.run_index; variant_test.total_runs = base_test.total_runs; variant_test.input_path = base_test.input_path; variant_test.variant = variant; // relative_path uses '?' for display, safe_relative_path uses '@' for filesystem auto variant_suffix = StringView { variant }.substring_view(1); variant_test.relative_path = ByteString::formatted("{}?{}", base_test.relative_path, variant_suffix); variant_test.safe_relative_path = ByteString::formatted("{}@{}", base_test.safe_relative_path, variant_suffix); // Expected file: test@variant_suffix.txt auto dir = LexicalPath::dirname(base_test.expectation_path); auto title = LexicalPath::title(LexicalPath::basename(base_test.input_path)); if (dir.is_empty()) variant_test.expectation_path = ByteString::formatted("{}@{}.txt", title, variant_suffix); else variant_test.expectation_path = ByteString::formatted("{}/{}@{}.txt", dir, title, variant_suffix); // Set the index before appending so it matches the position in the vector variant_test.index = context.tests.size(); context.tests.unchecked_append(move(variant_test)); } // Add variants.size() because the original test will decrement tests_remaining when // it completes as Expanded, and each variant will also decrement when it completes. 
context.tests_remaining += variants.size(); // For display, add (variants.size() - 1) since Expanded tests don't count in s_completed_tests context.total_tests += variants.size() - 1; } static void run_dump_test(TestWebView& view, TestRunContext& context, Test& test, URL::URL const& url) { auto test_index = test.index; auto handle_completed_test = [&context, test_index, url]() -> ErrorOr { auto& test = context.tests[test_index]; if (test.expectation_path.is_empty()) { if (test.mode != TestMode::Crash) outln("{}", test.text); return TestResult::Pass; } auto open_expectation_file = [&](auto mode) { auto expectation_file_or_error = Core::File::open(test.expectation_path, mode); if (expectation_file_or_error.is_error()) warnln(ByteString::formatted("Failed opening '{}': {}", test.expectation_path, expectation_file_or_error.error())); return expectation_file_or_error; }; ByteBuffer expectation; if (auto expectation_file = open_expectation_file(Core::File::OpenMode::Read); !expectation_file.is_error()) { expectation = TRY(expectation_file.value()->read_until_eof()); auto result_trimmed = StringView { test.text }.trim("\n"sv, TrimMode::Right); auto expectation_trimmed = StringView { expectation }.trim("\n"sv, TrimMode::Right); if (result_trimmed == expectation_trimmed) return TestResult::Pass; } if (Application::the().rebaseline) { TRY(Core::Directory::create(LexicalPath { test.expectation_path }.parent().string(), Core::Directory::CreateDirectories::Yes)); auto expectation_file = TRY(open_expectation_file(Core::File::OpenMode::Write)); TRY(expectation_file->write_until_depleted(test.text)); return TestResult::Pass; } TRY(write_test_diff_to_results(test, expectation)); Display::the().print_failure_diff(url, test, expectation); return TestResult::Fail; }; auto on_test_complete = [&view, test_index, handle_completed_test]() { if (auto result = handle_completed_test(); result.is_error()) view.on_test_complete({ test_index, TestResult::Fail }); else view.on_test_complete({ 
test_index, result.value() }); }; if (test.mode == TestMode::Layout) { view.on_load_finish = [&view, url](auto const& loaded_url) { // We don't want subframe loads to trigger the test finish. if (!url.equals(loaded_url, URL::ExcludeFragment::Yes)) return; view.run_javascript(wait_for_test_completion); }; view.on_test_finish = [&view, &context, test_index, on_test_complete = move(on_test_complete)](auto const&) { // NOTE: We take a screenshot here to force the lazy layout of SVG-as-image documents to happen. // It also causes a lot more code to run, which is good for finding bugs. :^) view.take_screenshot()->when_resolved([&view, &context, test_index, on_test_complete = move(on_test_complete)](auto const&) { auto promise = view.request_internal_page_info(WebView::PageInfoType::LayoutTree | WebView::PageInfoType::PaintTree | WebView::PageInfoType::StackingContextTree); promise->when_resolved([&context, test_index, on_test_complete = move(on_test_complete)](auto const& text) { context.tests[test_index].text = text; on_test_complete(); }); }); }; } else if (test.mode == TestMode::Text) { // Set up variant detection callback. 
view.on_test_variant_metadata = [&view, &context, test_index, on_test_complete](JsonValue metadata) { // Verify this IPC response is for the current test on this view (use index to avoid dangling pointer issues) auto current_index = s_current_test_index_by_view.get(&view); if (!current_index.has_value() || *current_index != test_index) return; auto& test = context.tests[test_index]; if (test.variant.has_value()) return; auto const& variants_array = metadata.as_array(); if (!variants_array.is_empty()) { Vector variants; variants.ensure_capacity(variants_array.size()); for (auto const& variant : variants_array.values()) variants.unchecked_append(variant.as_string()); expand_test_with_variants(context, test_index, variants); view.on_test_complete({ test_index, TestResult::Expanded }); return; } auto& test_after_check = context.tests[test_index]; test_after_check.did_check_variants = true; if (test_after_check.did_finish_test) on_test_complete(); }; view.on_load_finish = [&view, &context, test_index, on_test_complete, url](auto const& loaded_url) { // We don't want subframe loads to trigger the test finish. 
if (!url.equals(loaded_url, URL::ExcludeFragment::Yes)) return; auto& test = context.tests[test_index]; test.did_finish_loading = true; if (!test.variant.has_value()) view.run_javascript("internals.loadTestVariants();"_string); else test.did_check_variants = true; if (test.expectation_path.is_empty()) { auto promise = view.request_internal_page_info(WebView::PageInfoType::Text); promise->when_resolved([&context, test_index, on_test_complete = move(on_test_complete)](auto const& text) { auto& test = context.tests[test_index]; test.text = text; on_test_complete(); }); } else if (test.did_finish_test && test.did_check_variants) { on_test_complete(); } }; view.on_test_finish = [&context, test_index, on_test_complete](auto const& text) { auto& test = context.tests[test_index]; test.text = text; test.did_finish_test = true; if (test.did_finish_loading && test.did_check_variants) on_test_complete(); }; } else if (test.mode == TestMode::Crash) { view.on_load_finish = [on_test_complete, url, &view, &context, test_index](auto const& loaded_url) { // We don't want subframe loads to trigger the test finish. 
if (!url.equals(loaded_url, URL::ExcludeFragment::Yes)) return; auto& test = context.tests[test_index]; test.did_finish_loading = true; view.run_javascript(wait_for_test_completion); if (test.did_finish_test) on_test_complete(); }; view.on_test_finish = [&context, test_index, on_test_complete](auto const&) { auto& test = context.tests[test_index]; test.did_finish_test = true; if (test.did_finish_loading) on_test_complete(); }; } view.load(url); } static ErrorOr dump_screenshot_to_file(Gfx::Bitmap const& bitmap, StringView path) { auto screenshot_file = TRY(Core::File::open(path, Core::File::OpenMode::Write)); auto encoded_data = TRY(Gfx::PNGWriter::encode(bitmap)); TRY(screenshot_file->write_until_depleted(encoded_data)); return {}; } static ErrorOr write_screenshot_failure_results(Test& test, Gfx::Bitmap const& actual, Gfx::Bitmap const& expected) { auto base_path = TRY(prepare_output_path(test)); TRY(dump_screenshot_to_file(actual, ByteString::formatted("{}.actual.png", base_path))); TRY(dump_screenshot_to_file(expected, ByteString::formatted("{}.expected.png", base_path))); // Generate a diff image and compute stats. 
if (actual.width() == expected.width() && actual.height() == expected.height()) { auto diff = actual.diff(expected); test.diff_pixel_error_count = diff.pixel_error_count; test.diff_maximum_error = diff.maximum_error; auto diff_bitmap = TRY(Gfx::Bitmap::create(Gfx::BitmapFormat::BGRA8888, { actual.width(), actual.height() })); for (int y = 0; y < actual.height(); ++y) { for (int x = 0; x < actual.width(); ++x) { auto pixel = actual.get_pixel(x, y); if (pixel != expected.get_pixel(x, y)) diff_bitmap->set_pixel(x, y, Gfx::Color(255, 0, 0)); else diff_bitmap->set_pixel(x, y, pixel.mixed_with(expected.get_pixel(x, y), 0.5f).mixed_with(Gfx::Color::White, 0.8f)); } } TRY(dump_screenshot_to_file(*diff_bitmap, ByteString::formatted("{}.diff.png", base_path))); } return {}; } static void run_ref_test(TestWebView& view, TestRunContext& context, Test& test, URL::URL const& url) { auto test_index = test.index; auto handle_completed_test = [&view, &context, test_index, url]() -> ErrorOr { auto& test = context.tests[test_index]; VERIFY(test.ref_test_expectation_type.has_value()); auto should_match = test.ref_test_expectation_type == RefTestExpectationType::Match; auto screenshot_matches = fuzzy_screenshot_match(url, view.url(), *test.actual_screenshot, *test.expectation_screenshot, test.fuzzy_matches, should_match); if (should_match == screenshot_matches) return TestResult::Pass; TRY(write_screenshot_failure_results(test, *test.actual_screenshot, *test.expectation_screenshot)); return TestResult::Fail; }; auto on_test_complete = [&view, test_index, handle_completed_test]() { if (auto result = handle_completed_test(); result.is_error()) view.on_test_complete({ test_index, TestResult::Fail }); else view.on_test_complete({ test_index, result.value() }); }; view.on_load_finish = [&view, &context, test_index, url](auto const& loaded_url) { auto& test = context.tests[test_index]; // We don't want subframe loads to trigger this. 
if (test.ref_test_expectation_url.has_value()) { // Match against the expectation URL. if (!test.ref_test_expectation_url->equals(loaded_url, URL::ExcludeFragment::Yes)) return; } else { // Match against the test URL. if (!url.equals(loaded_url, URL::ExcludeFragment::Yes)) return; } if (!test.did_inject_js) { test.did_inject_js = true; view.run_javascript(wait_for_reftest_completion); } }; view.on_test_finish = [&view, &context, test_index, on_test_complete = move(on_test_complete)](auto const&) { auto& test = context.tests[test_index]; if (test.actual_screenshot) { // The reference has finished loading; take another screenshot and move on to handling the result. view.take_screenshot()->when_resolved([&view, &context, test_index, on_test_complete = move(on_test_complete)](RefPtr screenshot) { context.tests[test_index].expectation_screenshot = move(screenshot); view.reset_zoom(); on_test_complete(); }); } else { // When the test initially finishes, we take a screenshot and request the reference test metadata. view.take_screenshot()->when_resolved([&view, &context, test_index](RefPtr screenshot) { context.tests[test_index].actual_screenshot = move(screenshot); view.reset_zoom(); view.run_javascript("internals.loadReferenceTestMetadata();"_string); }); } }; view.on_reference_test_metadata = [&view, &context, test_index](JsonValue const& metadata) { auto& test = context.tests[test_index]; auto metadata_object = metadata.as_object(); auto match_references = metadata_object.get_array("match_references"sv); auto mismatch_references = metadata_object.get_array("mismatch_references"sv); if (match_references->is_empty() && mismatch_references->is_empty()) { dbgln("No match or mismatch references in `{}`! Metadata: {}", view.url(), metadata_object.serialized()); VERIFY_NOT_REACHED(); } // Read fuzzy configurations. 
test.fuzzy_matches.clear_with_capacity(); auto fuzzy_values = metadata_object.get_array("fuzzy"sv); for (size_t i = 0; i < fuzzy_values->size(); ++i) { auto fuzzy_configuration = fuzzy_values->at(i).as_object(); Optional reference_url; auto reference = fuzzy_configuration.get_string("reference"sv); if (reference.has_value()) reference_url = URL::Parser::basic_parse(reference.release_value()); auto content = fuzzy_configuration.get_string("content"sv).release_value(); auto fuzzy_match_or_error = parse_fuzzy_match(reference_url, content); if (fuzzy_match_or_error.is_error()) { warnln("Failed to parse fuzzy configuration '{}' (reference: {}): {}", content, reference_url, fuzzy_match_or_error.error()); continue; } test.fuzzy_matches.append(fuzzy_match_or_error.release_value()); } // Read (mis)match reference tests to load. // FIXME: Currently we only support single match or mismatch reference. String reference_to_load; if (!match_references->is_empty()) { if (match_references->size() > 1) dbgln("FIXME: Only a single ref test match reference is supported"); test.ref_test_expectation_type = RefTestExpectationType::Match; reference_to_load = match_references->at(0).as_string(); } else { if (mismatch_references->size() > 1) dbgln("FIXME: Only a single ref test mismatch reference is supported"); test.ref_test_expectation_type = RefTestExpectationType::Mismatch; reference_to_load = mismatch_references->at(0).as_string(); } // Clear flag so we can inject the JS into the reference page. 
test.ref_test_expectation_url = URL::Parser::basic_parse(reference_to_load).release_value(); test.did_inject_js = false; view.load(test.ref_test_expectation_url.value()); }; view.load(url); } static void run_screenshot_test(TestWebView& view, TestRunContext& context, Test& test, URL::URL const& url) { auto test_index = test.index; auto handle_completed_test = [&context, test_index, url]() -> ErrorOr { auto& test = context.tests[test_index]; auto& actual = *test.actual_screenshot; // Try to load and compare against existing expected PNG first. auto expectation_file_or_error = Core::MappedFile::map(test.expectation_path); if (!expectation_file_or_error.is_error()) { auto decoder = TRY(Gfx::ImageDecoder::try_create_for_raw_bytes(expectation_file_or_error.value()->bytes())); if (decoder) { auto frame = TRY(decoder->frame(0)); test.expectation_screenshot = move(frame.image); auto const& expected = *test.expectation_screenshot; auto screenshot_matches = fuzzy_screenshot_match(url, url, actual, expected, test.fuzzy_matches, true); if (screenshot_matches) return TestResult::Pass; } } // Screenshots don't match (or expected file doesn't exist yet). 
if (Application::the().rebaseline) { TRY(Core::Directory::create(LexicalPath { test.expectation_path }.parent().string(), Core::Directory::CreateDirectories::Yes)); TRY(dump_screenshot_to_file(actual, test.expectation_path)); auto optipng_or_error = Core::Process::spawn({ .executable = "optipng"sv, .search_for_executable_in_path = true, .arguments = { "-strip"sv, "all"sv, test.expectation_path }, }); if (optipng_or_error.is_error()) { warnln("Warning: Failed to run optipng: {}", optipng_or_error.error()); } else { auto exit_code_or_error = optipng_or_error.value().wait_for_termination(); if (exit_code_or_error.is_error()) warnln("Warning: Failed to wait for optipng: {}", exit_code_or_error.error()); else if (exit_code_or_error.value() != 0) warnln("Warning: optipng exited with code {}", exit_code_or_error.value()); } return TestResult::Pass; } // Not rebaselining and no valid expectation loaded. if (!test.expectation_screenshot) return Error::from_string_literal("Could not decode expected screenshot PNG"); TRY(write_screenshot_failure_results(test, actual, *test.expectation_screenshot)); return TestResult::Fail; }; auto on_test_complete = [&view, test_index, handle_completed_test]() { if (auto result = handle_completed_test(); result.is_error()) view.on_test_complete({ test_index, TestResult::Fail }); else view.on_test_complete({ test_index, result.value() }); }; view.on_load_finish = [&view, &context, test_index, url](auto const& loaded_url) { // We don't want subframe loads to trigger this. if (!url.equals(loaded_url, URL::ExcludeFragment::Yes)) return; auto& test = context.tests[test_index]; if (!test.did_inject_js) { test.did_inject_js = true; view.run_javascript(wait_for_reftest_completion); } }; view.on_test_finish = [&view, &context, test_index, on_test_complete = move(on_test_complete)](auto const&) { // Take a screenshot of the rendered test page. 
// Tail of the Screenshot-mode runner: capture a screenshot of the loaded page,
// then fetch the reference-test metadata so fuzzy-match tolerances can be read.
// NOTE(review): this file appears to have had all template arguments stripped
// (e.g. "RefPtr screenshot", "Optional url") — restore the angle-bracketed
// parameters from upstream before building; the code cannot compile as-is.
view.take_screenshot()->when_resolved([&view, &context, test_index, on_test_complete = move(on_test_complete)](RefPtr screenshot) {
    context.tests[test_index].actual_screenshot = move(screenshot);
    view.reset_zoom();

    // Load reference test metadata for fuzzy matching config.
    view.run_javascript("internals.loadReferenceTestMetadata();"_string);
    view.on_reference_test_metadata = [&context, test_index, on_test_complete = move(on_test_complete)](JsonValue const& metadata) {
        auto& test = context.tests[test_index];
        auto metadata_object = metadata.as_object();

        // Read fuzzy configurations (ignore match/mismatch references for Screenshot tests).
        test.fuzzy_matches.clear_with_capacity();
        auto fuzzy_values = metadata_object.get_array("fuzzy"sv);
        for (size_t i = 0; i < fuzzy_values->size(); ++i) {
            auto fuzzy_configuration = fuzzy_values->at(i).as_object();
            auto content = fuzzy_configuration.get_string("content"sv).release_value();
            auto fuzzy_match_or_error = parse_fuzzy_match({}, content);
            if (fuzzy_match_or_error.is_error()) {
                // A bad fuzzy entry is logged and skipped rather than failing the test.
                warnln("Failed to parse fuzzy configuration '{}': {}", content, fuzzy_match_or_error.error());
                continue;
            }
            test.fuzzy_matches.append(fuzzy_match_or_error.release_value());
        }
        on_test_complete();
    };
});
};
view.load(url);
}

// Drives a single test in `view`:
//  - records which test this view is running (used by crash/signal reporting),
//  - starts per-test output capture and a timeout timer,
//  - navigates to about:blank first to clear the previous document, and only
//    once that load finishes, resolves the test's real URL (file:// or the
//    local echo server for tests needing HTTP semantics) and dispatches to the
//    mode-specific runner (dump / ref / screenshot).
static void run_test(TestWebView& view, TestRunContext& context, size_t test_index, Application& app, TestRunCapture& test_run_capture)
{
    s_current_test_index_by_view.set(&view, test_index);
    auto& test = context.tests[test_index];
    test_run_capture.begin_test_output_capture(view, test);

    auto timeout_in_milliseconds = app.per_test_timeout_in_seconds * 1000;
    test.timeout_timer = Core::Timer::create_single_shot(timeout_in_milliseconds, [&view, &context, test_index]() {
        auto& test = context.tests[test_index];
        // If the test never got past the about:blank pre-navigation, the child
        // process is likely wedged rather than the test merely being slow.
        if (!test.did_start_test)
            dbgln("Timeout during pre-navigation for {}, WebContent process may be unresponsive", test.relative_path);
        view.on_test_complete({ test_index, TestResult::Timeout });
    });
    test.timeout_timer->start();

    // A test may request a longer timeout; we only ever extend, never shorten.
    // NOTE(review): "AK::clamp_to(milliseconds)" looks like it lost its template
    // argument during extraction — confirm (upstream uses a typed clamp_to).
    view.on_set_test_timeout = [&context, test_index, timeout_in_milliseconds](double milliseconds) {
        auto& test = context.tests[test_index];
        if (milliseconds > timeout_in_milliseconds)
            test.timeout_timer->restart(AK::clamp_to(milliseconds));
    };

    // Clear the current document.
    // FIXME: Implement a debug-request to do this more thoroughly.
    auto promise = Core::Promise::construct();
    view.on_load_finish = [promise](auto const& url) {
        if (!url.equals(URL::about_blank()))
            return;
        // Defer so the promise resolves outside the load-finish callback.
        Core::deferred_invoke([promise]() {
            promise->resolve({});
        });
    };
    view.on_test_finish = {};

    promise->when_resolved([&view, test_index, &app, &context](auto) {
        auto& test = context.tests[test_index];
        test.did_start_test = true;

        auto real_path = MUST(FileSystem::real_path(test.input_path));
        auto headers_path = ByteString::formatted("{}.headers", real_path);

        Optional url;
        if (FileSystem::exists(headers_path) || s_loaded_from_http_server.contains_slow(test.input_path)) {
            // Some tests need to be served via the echo server so, for example, HTTP headers from .headers files are
            // sent, or so that the resulting HTML document has a HTTP based origin (e.g. for testing cookies).
            auto echo_server_port = Application::web_content_options().echo_server_port;
            VERIFY(echo_server_port.has_value());
            auto relative_path = LexicalPath::relative_path(real_path, app.test_root_path);
            VERIFY(relative_path.has_value());
            url = URL::Parser::basic_parse(ByteString::formatted("http://localhost:{}/static/{}", echo_server_port.value(), relative_path.value())).release_value();
        } else {
            url = URL::create_with_file_scheme(real_path).release_value();
        }

        // Append variant query string if present (variant is "?foo=bar", set_query expects "foo=bar")
        if (test.variant.has_value())
            url->set_query(MUST(test.variant->substring_from_byte_offset_with_shared_superstring(1)));

        switch (test.mode) {
        case TestMode::Crash:
        case TestMode::Text:
        case TestMode::Layout:
            run_dump_test(view, context, test, *url);
            return;
        case TestMode::Ref:
            run_ref_test(view, context, test, *url);
            return;
        case TestMode::Screenshot:
            run_screenshot_test(view, context, test, *url);
            return;
        }
        VERIFY_NOT_REACHED();
    });

    // Kick off the document reset; the promise above fires when it completes.
    view.load(URL::about_blank());
}

// Installs UI-event handlers on `view` so chrome interactions that tests
// trigger (file pickers, alerts, WebContent crashes) are serviced
// automatically instead of blocking the run.
static void set_ui_callbacks_for_tests(TestWebView& view, TestRunCapture& test_run_capture)
{
    view.on_request_file_picker = [&](auto const& accepted_file_types, auto allow_multiple_files) {
        // Create some dummy files for tests.
// (continuation of the on_request_file_picker handler)
        // Decide which dummy file sets to offer based on the requested filters:
        // no filters at all => plain-text files; otherwise honor text/plain MIME
        // filters and ".cpp" extension filters.
        // NOTE(review): "Vector selected_files" has lost its template argument
        // (extraction artifact) — restore it from upstream before building.
        Vector selected_files;
        bool add_txt_files = accepted_file_types.filters.is_empty();
        bool add_cpp_files = false;
        for (auto const& filter : accepted_file_types.filters) {
            filter.visit(
                [](Web::HTML::FileFilter::FileType) {},
                [&](Web::HTML::FileFilter::MimeType const& mime_type) {
                    if (mime_type.value == "text/plain"sv)
                        add_txt_files = true;
                },
                [&](Web::HTML::FileFilter::Extension const& extension) {
                    if (extension.value == "cpp"sv)
                        add_cpp_files = true;
                });
        }

        if (add_txt_files) {
            selected_files.empend("file1"sv, MUST(ByteBuffer::copy("Contents for file1"sv.bytes())));
            if (allow_multiple_files == Web::HTML::AllowMultipleFiles::Yes) {
                selected_files.empend("file2"sv, MUST(ByteBuffer::copy("Contents for file2"sv.bytes())));
                selected_files.empend("file3"sv, MUST(ByteBuffer::copy("Contents for file3"sv.bytes())));
                selected_files.empend("file4"sv, MUST(ByteBuffer::copy("Contents for file4"sv.bytes())));
            }
        }

        if (add_cpp_files) {
            selected_files.empend("file1.cpp"sv, MUST(ByteBuffer::copy("int main() {{ return 1; }}"sv.bytes())));
            if (allow_multiple_files == Web::HTML::AllowMultipleFiles::Yes) {
                selected_files.empend("file2.cpp"sv, MUST(ByteBuffer::copy("int main() {{ return 2; }}"sv.bytes())));
            }
        }

        view.file_picker_closed(move(selected_files));
    };

    view.on_request_alert = [&](auto const&) {
        // For tests, just close the alert right away to unblock JS execution.
        view.alert_closed();
    };

    // On a WebContent crash, flush whatever output was captured and mark the
    // view's current test (if any) as crashed.
    view.on_web_content_crashed = [&view, &test_run_capture]() {
        test_run_capture.write_test_output(view);
        if (auto index = s_current_test_index_by_view.get(&view); index.has_value()) {
            view.on_test_complete({ *index, TestResult::Crashed });
        }
    };

    // A cross-site navigation can swap in a new WebContent process;
    // re-attach output capture to the replacement process.
    view.on_web_content_process_change_for_cross_site_navigation = [&view, &test_run_capture]() {
        test_run_capture.rebind_test_output_capture(view);
    };
}

// Collects, filters, and executes all tests across a pool of concurrent web
// views; the return value feeds the process exit code.
// NOTE(review): several declarations below have lost their template arguments
// ("ErrorOr run_tests", "Vector tests", "HashMap explicit_variants") —
// restore from upstream before building.
static ErrorOr run_tests(Core::AnonymousBuffer const& theme, Web::DevicePixelSize window_size)
{
    auto& app = Application::the();
    auto& display = Display::the();

    TRY(load_test_config(app.test_root_path));

    Vector tests;

    // Parse explicit variants from filters (e.g., "test.html?variant=foo")
    HashMap explicit_variants;
    for (auto& glob : app.test_globs) {
        if (auto query_pos = glob.find('?'); query_pos.has_value()) {
            auto base_glob = glob.substring(0, query_pos.value());
            auto variant = MUST(String::from_utf8(glob.substring_view(query_pos.value())));
            explicit_variants.set(ByteString::formatted("*{}*", base_glob), variant);
            glob = ByteString::formatted("*{}*", base_glob);
        } else {
            // Wrap plain filters in wildcards so substring matching works.
            glob = ByteString::formatted("*{}*", glob);
        }
    }

    if (app.test_globs.is_empty())
        app.test_globs.append("*"sv);

    // Gather every test category from its directory under the test root.
    TRY(collect_dump_tests(app, tests, ByteString::formatted("{}/Layout", app.test_root_path), "."sv, TestMode::Layout));
    TRY(collect_dump_tests(app, tests, ByteString::formatted("{}/Text", app.test_root_path), "."sv, TestMode::Text));
    TRY(collect_ref_tests(app, tests, ByteString::formatted("{}/Ref", app.test_root_path), "."sv));
    TRY(collect_crash_tests(app, tests, ByteString::formatted("{}/Crash", app.test_root_path), "."sv));
    TRY(collect_screenshot_tests(app, tests, ByteString::formatted("{}/Screenshot", app.test_root_path), "."sv));

    // Drop WPT support/resource files and anything that doesn't match the globs.
    tests.remove_all_matching([&](auto const& test) {
        static constexpr Array support_file_patterns {
            "*/wpt-import/*/support/*"sv,
            "*/wpt-import/*/resources/*"sv,
            "*/wpt-import/common/*"sv,
            "*/wpt-import/images/*"sv,
        };
        // Normalize Windows path separators before glob matching.
        auto normalize_path = [](ByteString
const& path) { return path.replace("\\"sv, "/"sv); };
        auto const test_input_path = normalize_path(test.input_path);
        auto const test_relative_path = normalize_path(test.relative_path);
        bool is_support_file = any_of(support_file_patterns, [&](auto pattern) { return test_input_path.matches(pattern); });
        bool match_glob = any_of(app.test_globs, [&](auto const& glob) { return test_relative_path.matches(glob, CaseSensitivity::CaseSensitive); });
        return is_support_file || !match_glob;
    });

    // Apply explicit variants from filters
    for (auto& test : tests) {
        for (auto const& [glob, variant] : explicit_variants) {
            if (test.relative_path.matches(glob, CaseSensitivity::CaseSensitive)) {
                test.variant = variant;
                // variant is "?foo=bar"; drop the leading '?' for path suffixes.
                auto variant_suffix = variant.bytes_as_string_view().substring_view(1);
                test.relative_path = ByteString::formatted("{}?{}", test.relative_path, variant_suffix);
                test.safe_relative_path = ByteString::formatted("{}@{}", test.safe_relative_path, variant_suffix);
                // Expectation files for variants carry an "@suffix" before ".txt".
                auto dir = LexicalPath::dirname(test.expectation_path);
                auto title = LexicalPath::title(LexicalPath::basename(test.input_path));
                if (dir.is_empty())
                    test.expectation_path = ByteString::formatted("{}@{}.txt", title, variant_suffix);
                else
                    test.expectation_path = ByteString::formatted("{}/{}@{}.txt", dir, title, variant_suffix);
                break;
            }
        }
    }

    if (app.shuffle)
        shuffle(tests);

    // Dry run: list what would execute, then exit successfully.
    if (app.test_dry_run) {
        outln("Found {} tests...", tests.size());
        for (auto const& [i, test] : enumerate(tests))
            outln("{}/{}: {}", i + 1, tests.size(), test.relative_path);
        return 0;
    }

    if (tests.is_empty()) {
        if (app.test_globs.is_empty())
            return Error::from_string_literal("No tests found");
        return Error::from_string_literal("No tests found matching filter");
    }

    // --repeat: duplicate the whole test list once per run, under run-N/ result paths.
    if (app.repeat_count > 1) {
        auto base_tests = move(tests);
        tests.ensure_capacity(base_tests.size() * app.repeat_count);
        for (size_t run_index = 1; run_index <= app.repeat_count; ++run_index) {
            for (auto const& base_test : base_tests) {
                Test test = base_test;
                test.run_index = run_index;
                test.total_runs = app.repeat_count;
                test.safe_relative_path = LexicalPath::join(ByteString::formatted("run-{}", run_index), test.safe_relative_path).string();
                tests.append(move(test));
            }
        }
    }

    size_t total_tests = tests.size();
    auto concurrency = min(app.test_concurrency, total_tests);
    size_t loaded_web_views = 0;

    // NOTE(review): "Vector> views" has lost its element type (extraction
    // artifact) — restore the template argument from upstream before building.
    Vector> views;
    views.ensure_capacity(concurrency);
    TestRunCapture test_run_capture;

    for (size_t i = 0; i < concurrency; ++i) {
        auto view = TestWebView::create(theme, window_size);
        view->on_load_finish = [&](auto const&) { ++loaded_web_views; };
        // FIXME: Figure out a better way to ensure that tests use default browser settings.
        view->reset_zoom();
        views.unchecked_append(move(view));
    }

    // We need to wait for the initial about:blank load to complete before starting the tests, otherwise we may load the
    // test URL before the about:blank load completes. WebContent currently cannot handle this, and will drop the test URL.
    Core::EventLoop::current().spin_until([&]() { return loaded_web_views == concurrency; });

    // Initialize view display states (used for idle tracking even when not on TTY)
    s_view_display_states.resize(concurrency);
    for (auto [i, view] : enumerate(views)) {
        s_view_display_states[i].pid = view->web_content_pid();
        s_view_display_states[i].active = false;
    }

    display.begin_run();
    ScopeGuard clear_live_display = [&] { display.clear_live_display(); };

    // Initialize per-view functions (for waking idle views)
    s_view_run_next_test.resize_and_keep_capacity(concurrency);
    s_all_tests_complete = Core::Promise::construct();

    auto tests_remaining = tests.size();
    auto current_test = 0uz;

    // Expose run state via s_run_context for signal reporting; cleared on exit.
    TestRunContext context { tests, tests_remaining, total_tests };
    s_run_context = &context;
    ScopeGuard clear_run_context = [&] { s_run_context = nullptr; };

    Vector non_passing_tests;
    bool fail_fast_triggered = false;

    for (auto [view_id, view] : enumerate(views)) {
        set_ui_callbacks_for_tests(*view, test_run_capture);
        view->clear_content_filters();

        // Tears down per-test state on this view after a test finishes.
        auto cleanup_test = [&, view = view.ptr()](size_t test_index, TestResult test_result) {
            view->on_load_finish = {};
            view->on_test_finish = {};
            view->on_reference_test_metadata = {};
            view->on_test_variant_metadata = {};
            view->on_set_test_timeout = {};
            // Disconnect child crash handlers so old child crashes don't affect the next test
            view->disconnect_child_crash_handlers();
            // Don't try to reset state if WebContent crashed - it's gone
            if (test_result != TestResult::Crashed) {
                view->reset_zoom();
                view->reset_viewport_size(window_size);
            }
            auto& test = tests[test_index];
            if (test.timeout_timer) {
                test.timeout_timer->stop();
                test.timeout_timer.clear();
            }
            s_current_test_index_by_view.remove(view);
        };

        // run_next_test handles: reset promise, attach callback, pick test, run test
        auto run_next_test = [&, view = view.ptr(), cleanup_test, view_id]() {
            if (app.fail_fast && fail_fast_triggered) {
                if (view_id < s_view_display_states.size())
                    s_view_display_states[view_id].active = false;
                return;
            }
            // Check without incrementing first - only consume an index if we have a test
            if (current_test >= tests.size()) {
                // Mark this view as idle (for variant wake-up tracking)
                if (view_id < s_view_display_states.size())
                    s_view_display_states[view_id].active = false;
                return;
            }
            auto index = current_test++;
            auto& test = tests[index];
            test.start_time = UnixDateTime::now();
            test.index = index;
            // Mark this view as active (for variant wake-up tracking)
            display.on_test_started(view_id, test, view->web_content_pid());

            // Reset promise and attach completion callback
            view->reset_test_promise();
            view->test_promise().when_resolved([&tests, &tests_remaining, &non_passing_tests, &app, view, cleanup_test, view_id, &test_run_capture, &fail_fast_triggered](auto result) {
                cleanup_test(result.test_index, result.result);
                auto& test = tests[result.test_index];
                // Clear screenshots to free memory
                test.actual_screenshot.clear();
                test.expectation_screenshot.clear();
                test.end_time = UnixDateTime::now();
                // Write captured std logs to results directory.
                // NOTE: On crashes, we already flushed it in on_web_content_crashed.
                if (result.result != TestResult::Crashed)
                    test_run_capture.write_test_output(*view);
                bool const is_non_passing_result = result.result != TestResult::Pass && result.result != TestResult::Expanded;
                bool const should_trigger_fail_fast = result.result == TestResult::Fail || result.result == TestResult::Timeout || result.result == TestResult::Crashed;
                if (is_non_passing_result)
                    non_passing_tests.append(result);
                Display::the().on_test_finished(view_id, test, result.result);
                // Fail-fast: stop the run immediately on the first hard failure.
                if (app.fail_fast && !fail_fast_triggered && should_trigger_fail_fast) {
                    fail_fast_triggered = true;
                    auto const pid = view->web_content_pid();
                    Display::the().on_fail_fast(test, result.result, pid);
                    if (s_all_tests_complete)
                        s_all_tests_complete->reject(Error::from_string_literal("Fail-fast"));
                    Core::EventLoop::current().quit(1);
                    if (result.result == TestResult::Timeout)
                        maybe_attach_on_fail_fast_timeout(pid);
                    return;
                }
                if (--tests_remaining == 0) {
                    s_all_tests_complete->resolve({});
                } else {
                    // Use deferred_invoke to avoid destroying callback while inside it
                    Core::deferred_invoke([view_id]() {
                        // Wake any idle views to help with remaining tests
                        for (size_t i = 0; i < s_view_run_next_test.size(); ++i) {
                            if (i < s_view_display_states.size() && !s_view_display_states[i].active && s_view_run_next_test[i])
                                s_view_run_next_test[i]();
                        }
                        // Run next test for this view
                        s_view_run_next_test[view_id]();
                    });
                }
            });

            // Start the test (or skip it) on the next event-loop turn.
            Core::deferred_invoke([&, index]() mutable {
                if (s_skipped_tests.contains_slow(tests[index].input_path))
                    view->on_test_complete({ index, TestResult::Skipped });
                else
                    run_test(*view, context, index, app, test_run_capture);
            });
        };

        // Store in static vector for access by variant expansion wake-up
        s_view_run_next_test[view_id] = move(run_next_test);
        Core::deferred_invoke([view_id]() { s_view_run_next_test[view_id](); });
    }

    // Block here until every test has completed, or the run is rejected
    // (fail-fast or a signal).
    auto result_or_rejection = s_all_tests_complete->await();
    display.clear_live_display();
// (tail of run_tests: final reporting and exit-code computation)
display.print_run_complete(tests, non_passing_tests, result_or_rejection.is_error() ? tests_remaining : 0);

    if (app.dump_gc_graph) {
        for (auto& view : views) {
            if (auto path = view->dump_gc_graph(); path.is_error()) {
                warnln("Failed to dump GC graph: {}", path.error());
            } else {
                outln("GC graph dumped to {}", path.value());
                auto source_root = LexicalPath(app.test_root_path).parent().parent().string();
                outln("GC graph explorer: file://{}/Meta/gc-heap-explorer.html?script=file://{}", source_root, path.value());
            }
        }
    }

    bool has_helper_output = test_run_capture.write_helper_process_output();

    // Generate result files (JSON data and HTML index)
    if (app.quiet || app.verbosity < Application::VERBOSITY_LEVEL_LOG_TEST_OUTPUT || !non_passing_tests.is_empty() || has_helper_output) {
        if (auto result = generate_result_files(tests, non_passing_tests); result.is_error())
            warnln("Failed to generate result files: {}", result.error());
        else
            outln("Results: file://{}/index.html", app.results_directory);
    }

    // Exit code: failed + timed-out + crashed counts, plus anything left unrun.
    return display.fail_count + display.timeout_count + display.crashed_count + tests_remaining;
}

// SIGINT/SIGTERM handler: quits the event loop, logs which test each view was
// running (with elapsed time), and rejects the run-complete promise so the
// blocked await() in run_tests returns.
static void handle_signal(int signal)
{
    VERIFY(signal == SIGINT || signal == SIGTERM);

    // Quit our event loop. This makes `::exec()` return as soon as possible, and signals to WebView::Application that
    // we should no longer automatically restart processes in `::process_did_exit()`.
    Core::EventLoop::current().quit(0);

    // Report current view statuses
    dbgln();
    dbgln("{} received. Active test views:", signal == SIGINT ? "SIGINT"sv : "SIGTERM"sv);
    dbgln();

    auto now = UnixDateTime::now();
    WebView::ViewImplementation::for_each_view([&](WebView::ViewImplementation const& view) {
        dbg("- View {}: ", view.view_id());
        auto maybe_index = s_current_test_index_by_view.get(&view);
        if (maybe_index.has_value() && s_run_context) {
            auto const& test = s_run_context->tests[*maybe_index];
            dbgln("{} (duration: {})", test.relative_path, human_readable_time(now - test.start_time));
        } else {
            dbgln("{} (no active test)", view.url());
        }
        return IterationDecision::Continue;
    });
    dbgln();

    // Stop running tests
    s_all_tests_complete->reject(signal == SIGINT ? Error::from_string_view("SIGINT received"sv) : Error::from_string_view("SIGTERM received"sv));
}

} // namespace TestWeb

// Program entry point: create the application, register signal handlers, load
// the theme, resolve and create the results directory, then run the suite.
// NOTE(review): the return type has lost its template argument ("ErrorOr") —
// restore it from upstream before building.
ErrorOr ladybird_main(Main::Arguments arguments)
{
#if defined(LADYBIRD_BINARY_PATH)
    auto app = TRY(TestWeb::Application::create(arguments, LADYBIRD_BINARY_PATH));
#else
    auto app = TRY(TestWeb::Application::create(arguments, OptionalNone {}));
#endif

    app->invocation_command_line = MUST(String::join(' ', arguments.strings));

    // --repeat and --rebaseline are mutually exclusive: a rebaseline would be
    // overwritten by each subsequent repeat.
    if (app->repeat_count > 1 && app->rebaseline) {
        warnln("Error: --repeat cannot be used together with --rebaseline.");
        warnln("Run once with --rebaseline, or drop --rebaseline when repeating.");
        return 1;
    }

    Core::EventLoop::register_signal(SIGINT, TestWeb::handle_signal);
    Core::EventLoop::register_signal(SIGTERM, TestWeb::handle_signal);

    auto theme_path = LexicalPath::join(WebView::s_ladybird_resource_root, "themes"sv, "Default.ini"sv);
    auto theme = TRY(Gfx::load_system_theme(theme_path.string()));

    auto const& browser_options = TestWeb::Application::browser_options();
    Web::DevicePixelSize window_size { browser_options.window_width, browser_options.window_height };

    VERIFY(!app->test_root_path.is_empty());
    // Make both key paths absolute relative to the current working directory.
    app->test_root_path = LexicalPath::absolute_path(TRY(FileSystem::current_working_directory()), app->test_root_path);
    app->results_directory = LexicalPath::absolute_path(TRY(FileSystem::current_working_directory()), app->results_directory);
    TRY(Core::Directory::create(app->results_directory, Core::Directory::CreateDirectories::Yes));

    TRY(app->launch_test_fixtures());

    return TestWeb::run_tests(theme, window_size);
}