1 // Copyright 2014 The Kyua Authors.
2 // All rights reserved.
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
8 // * Redistributions of source code must retain the above copyright
9 // notice, this list of conditions and the following disclaimer.
10 // * Redistributions in binary form must reproduce the above copyright
11 // notice, this list of conditions and the following disclaimer in the
12 // documentation and/or other materials provided with the distribution.
13 // * Neither the name of Google Inc. nor the names of its contributors
14 // may be used to endorse or promote products derived from this software
15 // without specific prior written permission.
17 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 #include "engine/scheduler.hpp"
32 #include <sys/types.h>
43 #include <atf-c++.hpp>
45 #include "engine/config.hpp"
46 #include "engine/exceptions.hpp"
47 #include "model/context.hpp"
48 #include "model/metadata.hpp"
49 #include "model/test_case.hpp"
50 #include "model/test_program.hpp"
51 #include "model/test_result.hpp"
52 #include "utils/config/tree.ipp"
53 #include "utils/datetime.hpp"
54 #include "utils/defs.hpp"
55 #include "utils/env.hpp"
56 #include "utils/format/containers.ipp"
57 #include "utils/format/macros.hpp"
58 #include "utils/fs/operations.hpp"
59 #include "utils/fs/path.hpp"
60 #include "utils/optional.ipp"
61 #include "utils/passwd.hpp"
62 #include "utils/process/status.hpp"
63 #include "utils/sanity.hpp"
64 #include "utils/stacktrace.hpp"
65 #include "utils/stream.hpp"
66 #include "utils/test_utils.ipp"
67 #include "utils/text/exceptions.hpp"
68 #include "utils/text/operations.ipp"
70 namespace config = utils::config;
71 namespace datetime = utils::datetime;
72 namespace fs = utils::fs;
73 namespace passwd = utils::passwd;
74 namespace process = utils::process;
75 namespace scheduler = engine::scheduler;
76 namespace text = utils::text;
79 using utils::optional;
/// Checks if a string starts with a prefix.
///
/// \param str The string to be tested.
/// \param prefix The prefix to look for.
///
/// \return True if the string is prefixed as specified.
///
/// An empty prefix matches any string.  (NOTE(review): the block was
/// syntactically truncated in this copy of the file — the return type and
/// braces were missing; reconstructed here from the visible tokens.)
static bool
starts_with(const std::string& str, const std::string& prefix)
{
    // The length guard avoids relying on substr() clamping semantics and
    // makes the short-string case an early, obvious false.
    return (str.length() >= prefix.length() &&
            str.substr(0, prefix.length()) == prefix);
}
99 /// Strips a prefix from a string and converts the rest to an integer.
101 /// \param str The string to be tested.
102 /// \param prefix The prefix to strip from the string.
104 /// \return The part of the string after the prefix converted to an integer.
106 suffix_to_int(const std::string& str, const std::string& prefix)
// Precondition: callers must only pass names of the form "<prefix><number>"
// (see the "exit " handling in exec_test below).
108 PRE(starts_with(str, prefix));
110 return text::to_type< int >(str.substr(prefix.length()));
111 } catch (const text::value_error& error) {
// NOTE(review): on a malformed suffix this only reports the error to stderr;
// the lines following the catch are not visible here, so how the function
// terminates afterwards (presumably an abort) must be confirmed upstream.
112 std::cerr << F("Failed: %s\n") % error.what();
118 /// Mock interface definition for testing.
120 /// This scheduler interface does not execute external binaries. It is designed
121 /// to simulate the scheduler of various programs with different exit statuses.
122 class mock_interface : public scheduler::interface {
123 /// Executes the subprocess simulating an exec.
125 /// This is just a simple wrapper over _exit(2) because we cannot use
126 /// std::exit on exit from this mock interface. The reason is that we do
127 /// not want to invoke any destructors as otherwise we'd clear up the global
128 /// scheduler state by mistake. This wouldn't be a major problem if it
129 /// wasn't because doing so deletes on-disk files and we want to leave them
130 /// in place so that the parent process can test for them!
132 /// \param exit_code Exit code.
// Marked UTILS_NORETURN: this helper never returns to the caller.
134 do_exit(const int exit_code) const UTILS_NORETURN
141 /// Executes a test case that creates various files and then fails.
// The created files/directories are left behind on purpose so that the
// "list files on failure" behavior can inspect them from the parent.
143 exec_create_files_and_fail(void) const UTILS_NORETURN
145 std::cerr << "This should not be clobbered\n";
146 atf::utils::create_file("first file", "");
147 atf::utils::create_file("second-file", "");
148 fs::mkdir_p(fs::path("dir1/dir2"), 0755);
// Self-deliver SIGTERM so the body terminates due to a signal, not an exit.
149 ::kill(::getpid(), SIGTERM);
153 /// Executes a test case that deletes all files in the current directory.
155 /// This is intended to validate that the test runs in an empty directory,
156 /// separate from any control files that the scheduler may have created.
158 exec_delete_all(void) const UTILS_NORETURN
// NOTE(review): the exit code only reflects whether system(3) itself could
// run, not whether "rm *" succeeded; rm's own status is ignored here.
160 const int exit_code = ::system("rm *") == -1
161 ? EXIT_FAILURE : EXIT_SUCCESS;
163 // Recreate our own cookie.
164 atf::utils::create_file("exec_test_was_called", "");
169 /// Executes a test case that returns a specific exit code.
171 /// \param exit_code Exit status to terminate the program with.
// Thin, never-returning wrapper; the actual termination happens in do_exit().
173 exec_exit(const int exit_code) const UTILS_NORETURN
178 /// Executes a test case that just fails.
// Fails by self-delivering SIGTERM so the parent observes a signal death
// rather than a clean exit; the stderr line must survive untouched.
180 exec_fail(void) const UTILS_NORETURN
182 std::cerr << "This should not be clobbered\n";
183 ::kill(::getpid(), SIGTERM);
187 /// Executes a test case that prints all input parameters to the functor.
189 /// \param test_program The test program to execute.
190 /// \param test_case_name Name of the test case to invoke, which must be a
192 /// \param vars User-provided variables to pass to the test program.
194 exec_print_params(const model::test_program& test_program,
195 const std::string& test_case_name,
196 const config::properties_map& vars) const
197 {
199 std::cout << F("Test program: %s\n") % test_program.relative_path();
200 std::cout << F("Test case: %s\n") % test_case_name;
// One "key=value" line per configuration variable, in the map's iteration
// order; the integration__parameters_and_output test asserts this output.
201 for (config::properties_map::const_iterator iter = vars.begin();
202 iter != vars.end(); ++iter) {
203 std::cout << F("%s=%s\n") % (*iter).first % (*iter).second;
206 std::cerr << F("stderr: %s\n") % test_case_name;
208 do_exit(EXIT_SUCCESS);
212 /// Executes a test program's list operation.
214 /// This method is intended to be called within a subprocess and is expected
215 /// to terminate execution either by exec(2)ing the test program or by
216 /// exiting with a failure.
218 /// \param test_program The test program to execute.
219 /// \param vars User-provided variables to pass to the test program.
221 exec_list(const model::test_program& test_program,
222 const config::properties_map& vars)
// Dispatch on the binary's basename: each supported name simulates one
// distinct listing behavior exercised by the integration__list_* tests.
225 const std::string name = test_program.absolute_path().leaf_name();
229 if (name == "check_i_exist") {
// Verifies that the scheduler handed us a resolvable absolute path.
230 if (fs::exists(test_program.absolute_path())) {
231 std::cout << "found\n";
232 do_exit(EXIT_SUCCESS);
234 std::cout << "not_found\n";
235 do_exit(EXIT_FAILURE);
237 } else if (name == "empty") {
// Prints nothing; parse_list then yields an empty test cases list.
238 do_exit(EXIT_SUCCESS);
239 } else if (name == "misbehave") {
240 utils::abort_without_coredump();
241 } else if (name == "timeout") {
// Prints one line and then stalls so that scheduler::list_timeout fires;
// the abort below should only run if the deadline does not kill us first.
242 std::cout << "sleeping\n";
245 utils::abort_without_coredump();
246 } else if (name == "vars") {
// Echo each variable as "key_value" so the caller can check propagation.
247 for (config::properties_map::const_iterator iter = vars.begin();
248 iter != vars.end(); ++iter) {
249 std::cout << F("%s_%s\n") % (*iter).first % (*iter).second;
257 /// Computes the test cases list of a test program.
259 /// \param status The termination status of the subprocess used to execute
260 /// the exec_test() method or none if the test timed out.
261 /// \param stdout_path Path to the file containing the stdout of the test.
262 /// \param stderr_path Path to the file containing the stderr of the test.
264 /// \return A list of test cases.
265 model::test_cases_map
266 parse_list(const optional< process::status >& status,
267 const fs::path& stdout_path,
268 const fs::path& stderr_path) const
// The mock writes the scenario name to stderr, so stderr doubles as a
// channel to tell this parser which termination status to expect.
270 const std::string name = utils::read_file(stderr_path);
271 if (name == "check_i_exist") {
272 ATF_REQUIRE(status.get().exited());
273 ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus());
274 } else if (name == "empty") {
275 ATF_REQUIRE(status.get().exited());
276 ATF_REQUIRE_EQ(EXIT_SUCCESS, status.get().exitstatus());
277 } else if (name == "misbehave") {
// Propagated by the scheduler as a broken "fake" test case; see
// integration__list_fail for the expectation on this message.
278 throw std::runtime_error("misbehaved in parse_list");
279 } else if (name == "timeout") {
// A timed-out listing yields no status at all.
280 ATF_REQUIRE(!status);
281 } else if (name == "vars") {
// NOTE(review): expects exit status 15 for the "vars" scenario; the
// corresponding exit in exec_list is not visible here -- confirm they match.
282 ATF_REQUIRE(status.get().exited());
283 ATF_REQUIRE_EQ(15, status.get().exitstatus());
285 ATF_FAIL("Invalid stderr contents; got " + name);
// Every stdout line becomes one test case name.
288 model::test_cases_map_builder test_cases_builder;
290 std::ifstream input(stdout_path.c_str());
293 while (std::getline(input, line).good()) {
294 test_cases_builder.add(line);
297 return test_cases_builder.build();
300 /// Executes a test case of the test program.
302 /// This method is intended to be called within a subprocess and is expected
303 /// to terminate execution either by exec(2)ing the test program or by
304 /// exiting with a failure.
306 /// \param test_program The test program to execute.
307 /// \param test_case_name Name of the test case to invoke.
308 /// \param vars User-provided variables to pass to the test program.
309 /// \param control_directory Directory where the interface may place control
312 exec_test(const model::test_program& test_program,
313 const std::string& test_case_name,
314 const config::properties_map& vars,
315 const fs::path& control_directory) const
// Drop a cookie recording the invoked test case name; compute_result later
// uses it both to validate control_directory and to recover the name.
317 const fs::path cookie = control_directory / "exec_test_was_called";
318 std::ofstream control_file(cookie.c_str());
320 std::cerr << "Failed to create " << cookie << '\n';
323 control_file << test_case_name;
324 control_file.close();
// Dispatch on (a prefix of) the test case name to pick the simulated
// body behavior.  Order matters: "fail" is checked before its longer
// "fail_body_*" variants via starts_with.
326 if (test_case_name == "check_i_exist") {
327 do_exit(fs::exists(test_program.absolute_path()) ? 0 : 1);
328 } else if (starts_with(test_case_name, "cleanup_timeout")) {
329 exec_exit(EXIT_SUCCESS);
330 } else if (starts_with(test_case_name, "create_files_and_fail")) {
331 exec_create_files_and_fail();
332 } else if (test_case_name == "delete_all") {
334 } else if (starts_with(test_case_name, "exit ")) {
335 exec_exit(suffix_to_int(test_case_name, "exit "));
336 } else if (starts_with(test_case_name, "fail")) {
338 } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) {
340 } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) {
342 } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) {
343 exec_exit(EXIT_SUCCESS);
344 } else if (starts_with(test_case_name, "print_params")) {
345 exec_print_params(test_program, test_case_name, vars);
346 } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) {
347 exec_exit(EXIT_SUCCESS);
// Unknown names are a programming error in the test themselves.
349 std::cerr << "Unknown test case " << test_case_name << '\n';
354 /// Executes a test cleanup routine of the test program.
356 /// This method is intended to be called within a subprocess and is expected
357 /// to terminate execution either by exec(2)ing the test program or by
358 /// exiting with a failure.
360 /// \param test_case_name Name of the test case to invoke.
362 exec_cleanup(const model::test_program& /* test_program */,
363 const std::string& test_case_name,
364 const config::properties_map& /* vars */,
365 const fs::path& /* control_directory */) const
// This marker line on stdout is what the do_cleanup_test and
// integration__cleanup__head_skips tests grep for.
367 std::cout << "exec_cleanup was called\n";
// Simulate the cleanup outcome implied by the test case name's suffix.
370 if (starts_with(test_case_name, "cleanup_timeout")) {
373 } else if (starts_with(test_case_name, "fail_body_fail_cleanup")) {
375 } else if (starts_with(test_case_name, "fail_body_pass_cleanup")) {
376 exec_exit(EXIT_SUCCESS);
377 } else if (starts_with(test_case_name, "pass_body_fail_cleanup")) {
379 } else if (starts_with(test_case_name, "skip_body_pass_cleanup")) {
380 exec_exit(EXIT_SUCCESS);
382 std::cerr << "Should not have been called for a test without "
383 "a cleanup routine" << '\n';
388 /// Computes the result of a test case based on its termination status.
390 /// \param status The termination status of the subprocess used to execute
391 /// the exec_test() method or none if the test timed out.
392 /// \param control_directory Path to the directory where the interface may
393 /// have placed control files.
394 /// \param stdout_path Path to the file containing the stdout of the test.
395 /// \param stderr_path Path to the file containing the stderr of the test.
397 /// \return A test result.
399 compute_result(const optional< process::status >& status,
400 const fs::path& control_directory,
401 const fs::path& stdout_path,
402 const fs::path& stderr_path) const
404 // Do not use any ATF_* macros here. Some of the tests below invoke
405 // this code in a subprocess, and terminating such subprocess due to a
406 // failed ATF_* macro yields mysterious failures that are incredibly
407 // hard to debug. (Case in point: the signal_handling test is racy by
408 // nature, and the test run by exec_test() above may not have created
409 // the cookie we expect below. We don't want to "silently" exit if the
410 // file is not there.)
// No status at all means the body timed out; report it as broken.
413 return model::test_result(model::test_result_broken,
417 if (status.get().exited()) {
418 // Only sanity-check the work directory-related parameters in case
419 // of a clean exit. In all other cases, there is no guarantee that
420 // these were ever created.
421 const fs::path cookie = control_directory / "exec_test_was_called";
422 if (!atf::utils::file_exists(cookie.str())) {
423 return model::test_result(
424 model::test_result_broken,
425 "compute_result's control_directory does not seem to point "
426 "to the right location");
// The cookie's contents are the test case name written by exec_test().
428 const std::string test_case_name = utils::read_file(cookie);
430 if (!atf::utils::file_exists(stdout_path.str())) {
431 return model::test_result(
432 model::test_result_broken,
433 "compute_result's stdout_path does not exist");
435 if (!atf::utils::file_exists(stderr_path.str())) {
436 return model::test_result(
437 model::test_result_broken,
438 "compute_result's stderr_path does not exist");
// Map the clean exit onto skipped/passed depending on the scenario.
441 if (test_case_name == "skip_body_pass_cleanup") {
442 return model::test_result(
443 model::test_result_skipped,
444 F("Exit %s") % status.get().exitstatus());
446 return model::test_result(
447 model::test_result_passed,
448 F("Exit %s") % status.get().exitstatus());
// Signal deaths (e.g. the SIGTERM raised by exec_fail) become failures.
451 return model::test_result(
452 model::test_result_failed,
453 F("Signal %s") % status.get().termsig());
459 } // anonymous namespace
462 /// Runs list_tests on the scheduler and returns the results.
464 /// \param test_name The name of the test supported by our exec_list function.
465 /// \param user_config Optional user settings for the test.
467 /// \return The loaded list of test cases.
468 static model::test_cases_map
469 check_integration_list(const char* test_name, const fs::path root,
470 const config::tree& user_config = engine::empty_config())
// Build a "mock"-interface test program rooted at 'root' so that both
// relative and absolute path handling can be exercised by the callers.
472 const model::test_program program = model::test_program_builder(
473 "mock", fs::path(test_name), root, "the-suite")
476 scheduler::scheduler_handle handle = scheduler::setup();
477 const model::test_cases_map test_cases = handle.list_tests(&program,
485 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_some);
486 ATF_TEST_CASE_BODY(integration__list_some)
// Only variables under the program's own suite ("the-suite") must reach
// the mock lister; "abc.unused" must be filtered out.
488 config::tree user_config = engine::empty_config();
489 user_config.set_string("test_suites.the-suite.first", "test");
490 user_config.set_string("test_suites.the-suite.second", "TEST");
491 user_config.set_string("test_suites.abc.unused", "unused");
493 const model::test_cases_map test_cases = check_integration_list(
494 "vars", fs::path("."), user_config);
// The "vars" mock prints one "key_value" line per variable, which the
// parser turns into these test case names.
496 const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
497 .add("first_test").add("second_TEST").build();
498 ATF_REQUIRE_EQ(exp_test_cases, test_cases);
502 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_check_paths);
503 ATF_TEST_CASE_BODY(integration__list_check_paths)
// Create a real on-disk binary so that the "check_i_exist" mock lister
// can verify the scheduler resolves relative path + root correctly.
505 fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755);
506 atf::utils::create_file("dir1/dir2/dir3/check_i_exist", "");
508 const model::test_cases_map test_cases = check_integration_list(
509 "dir2/dir3/check_i_exist", fs::path("dir1"));
// "found" is printed by exec_list only when the absolute path exists.
511 const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
512 .add("found").build();
513 ATF_REQUIRE_EQ(exp_test_cases, test_cases);
517 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_timeout);
518 ATF_TEST_CASE_BODY(integration__list_timeout)
// Shrink the global listing deadline so the stalling "timeout" mock is
// killed quickly; only its pre-stall output should be collected.
520 scheduler::list_timeout = datetime::delta(1, 0);
521 const model::test_cases_map test_cases = check_integration_list(
522 "timeout", fs::path("."));
524 const model::test_cases_map exp_test_cases = model::test_cases_map_builder()
525 .add("sleeping").build();
526 ATF_REQUIRE_EQ(exp_test_cases, test_cases);
530 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_fail);
531 ATF_TEST_CASE_BODY(integration__list_fail)
533 const model::test_cases_map test_cases = check_integration_list(
534 "misbehave", fs::path("."));
// A failed listing is surfaced as a single synthetic test case carrying a
// fake broken result with the parser's exception message.
536 ATF_REQUIRE_EQ(1, test_cases.size());
537 const model::test_case& test_case = test_cases.begin()->second;
538 ATF_REQUIRE_EQ("__test_cases_list__", test_case.name());
539 ATF_REQUIRE(test_case.fake_result());
540 ATF_REQUIRE_EQ(model::test_result(model::test_result_broken,
541 "misbehaved in parse_list"),
542 test_case.fake_result().get());
546 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_empty);
547 ATF_TEST_CASE_BODY(integration__list_empty)
548 {
549 const model::test_cases_map test_cases = check_integration_list(
550 "empty", fs::path("."));
// An empty listing is also reported via a synthetic broken test case
// rather than an empty map, so callers always see at least one entry.
552 ATF_REQUIRE_EQ(1, test_cases.size());
553 const model::test_case& test_case = test_cases.begin()->second;
554 ATF_REQUIRE_EQ("__test_cases_list__", test_case.name());
555 ATF_REQUIRE(test_case.fake_result());
556 ATF_REQUIRE_EQ(model::test_result(model::test_result_broken,
557 "Empty test cases list"),
558 test_case.fake_result().get());
562 ATF_TEST_CASE_WITHOUT_HEAD(integration__run_one);
563 ATF_TEST_CASE_BODY(integration__run_one)
// Smoke test: spawn a single "exit 41" mock test and verify its identity
// and result through the wait_any() handle.
565 const model::test_program_ptr program = model::test_program_builder(
566 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
567 .add_test_case("exit 41").build_ptr();
569 const config::tree user_config = engine::empty_config();
571 scheduler::scheduler_handle handle = scheduler::setup();
573 const scheduler::exec_handle exec_handle = handle.spawn_test(
574 program, "exit 41", user_config);
576 scheduler::result_handle_ptr result_handle = handle.wait_any();
577 const scheduler::test_result_handle* test_result_handle =
578 dynamic_cast< const scheduler::test_result_handle* >(
579 result_handle.get());
// The handle returned by spawn_test must round-trip as the original PID.
580 ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid());
581 ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 41"),
582 test_result_handle->test_result());
583 result_handle->cleanup();
584 result_handle.reset();
590 ATF_TEST_CASE_WITHOUT_HEAD(integration__run_many);
591 ATF_TEST_CASE_BODY(integration__run_many)
// Stress test: spawn 30 programs x 3 test cases each and verify that
// wait_any() pairs every completion with the right program, test case,
// mocked start time and exit status, regardless of completion order.
593 static const std::size_t num_test_programs = 30;
595 const config::tree user_config = engine::empty_config();
597 scheduler::scheduler_handle handle = scheduler::setup();
599 // We mess around with the "current time" below, so make sure the tests do
600 // not spuriously exceed their deadline by bumping it to a large number.
601 const model::metadata infinite_timeout = model::metadata_builder()
602 .set_timeout(datetime::delta(1000000L, 0)).build();
// Expected values are recorded per exec_handle at spawn time and looked
// up again when each result arrives.
604 std::size_t total_tests = 0;
605 std::map< scheduler::exec_handle, model::test_program_ptr >
607 std::map< scheduler::exec_handle, std::string > exp_test_case_names;
608 std::map< scheduler::exec_handle, datetime::timestamp > exp_start_times;
609 std::map< scheduler::exec_handle, int > exp_exit_statuses;
610 for (std::size_t i = 0; i < num_test_programs; ++i) {
// Exit statuses are unique across the whole run: i*3, i*3+1, i*3+2.
611 const std::string test_case_0 = F("exit %s") % (i * 3 + 0);
612 const std::string test_case_1 = F("exit %s") % (i * 3 + 1);
613 const std::string test_case_2 = F("exit %s") % (i * 3 + 2);
615 const model::test_program_ptr program = model::test_program_builder(
616 "mock", fs::path(F("program-%s") % i),
617 fs::current_path(), "the-suite")
618 .set_metadata(infinite_timeout)
619 .add_test_case(test_case_0)
620 .add_test_case(test_case_1)
621 .add_test_case(test_case_2)
// Per-iteration unique start time (microsecond field carries i).
624 const datetime::timestamp start_time = datetime::timestamp::from_values(
625 2014, 12, 8, 9, 40, 0, i);
627 scheduler::exec_handle exec_handle;
629 datetime::set_mock_now(start_time);
630 exec_handle = handle.spawn_test(program, test_case_0, user_config);
631 exp_test_programs.insert(std::make_pair(exec_handle, program));
632 exp_test_case_names.insert(std::make_pair(exec_handle, test_case_0));
633 exp_start_times.insert(std::make_pair(exec_handle, start_time));
634 exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3));
637 datetime::set_mock_now(start_time);
638 exec_handle = handle.spawn_test(program, test_case_1, user_config);
639 exp_test_programs.insert(std::make_pair(exec_handle, program));
640 exp_test_case_names.insert(std::make_pair(exec_handle, test_case_1));
641 exp_start_times.insert(std::make_pair(exec_handle, start_time));
642 exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 1));
645 datetime::set_mock_now(start_time);
646 exec_handle = handle.spawn_test(program, test_case_2, user_config);
647 exp_test_programs.insert(std::make_pair(exec_handle, program));
648 exp_test_case_names.insert(std::make_pair(exec_handle, test_case_2));
649 exp_start_times.insert(std::make_pair(exec_handle, start_time));
650 exp_exit_statuses.insert(std::make_pair(exec_handle, i * 3 + 2));
// Drain all completions, checking each against the recorded expectations.
654 for (std::size_t i = 0; i < total_tests; ++i) {
655 const datetime::timestamp end_time = datetime::timestamp::from_values(
656 2014, 12, 8, 9, 50, 10, i);
657 datetime::set_mock_now(end_time);
658 scheduler::result_handle_ptr result_handle = handle.wait_any();
659 const scheduler::test_result_handle* test_result_handle =
660 dynamic_cast< const scheduler::test_result_handle* >(
661 result_handle.get());
663 const scheduler::exec_handle exec_handle =
664 result_handle->original_pid();
666 const model::test_program_ptr test_program = exp_test_programs.find(
667 exec_handle)->second;
668 const std::string& test_case_name = exp_test_case_names.find(
669 exec_handle)->second;
670 const datetime::timestamp& start_time = exp_start_times.find(
671 exec_handle)->second;
672 const int exit_status = exp_exit_statuses.find(exec_handle)->second;
674 ATF_REQUIRE_EQ(model::test_result(model::test_result_passed,
675 F("Exit %s") % exit_status),
676 test_result_handle->test_result());
678 ATF_REQUIRE_EQ(test_program, test_result_handle->test_program());
679 ATF_REQUIRE_EQ(test_case_name, test_result_handle->test_case_name());
681 ATF_REQUIRE_EQ(start_time, result_handle->start_time());
682 ATF_REQUIRE_EQ(end_time, result_handle->end_time());
684 result_handle->cleanup();
// cleanup() must remove all per-test on-disk artifacts.
686 ATF_REQUIRE(!atf::utils::file_exists(
687 result_handle->stdout_file().str()));
688 ATF_REQUIRE(!atf::utils::file_exists(
689 result_handle->stderr_file().str()));
690 ATF_REQUIRE(!atf::utils::file_exists(
691 result_handle->work_directory().str()));
693 result_handle.reset();
700 ATF_TEST_CASE_WITHOUT_HEAD(integration__run_check_paths);
701 ATF_TEST_CASE_BODY(integration__run_check_paths)
// Like integration__list_check_paths but for the run path: the
// "check_i_exist" mock body exits 0 only if the absolute path resolves.
703 fs::mkdir_p(fs::path("dir1/dir2/dir3"), 0755);
704 atf::utils::create_file("dir1/dir2/dir3/program", "");
706 const model::test_program_ptr program = model::test_program_builder(
707 "mock", fs::path("dir2/dir3/program"), fs::path("dir1"), "the-suite")
708 .add_test_case("check_i_exist").build_ptr();
710 scheduler::scheduler_handle handle = scheduler::setup();
712 (void)handle.spawn_test(program, "check_i_exist", engine::default_config());
713 scheduler::result_handle_ptr result_handle = handle.wait_any();
714 const scheduler::test_result_handle* test_result_handle =
715 dynamic_cast< const scheduler::test_result_handle* >(
716 result_handle.get());
718 ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
719 test_result_handle->test_result());
721 result_handle->cleanup();
722 result_handle.reset();
728 ATF_TEST_CASE_WITHOUT_HEAD(integration__parameters_and_output);
729 ATF_TEST_CASE_BODY(integration__parameters_and_output)
// End-to-end check that configuration variables and program identity reach
// exec_print_params, and that stdout/stderr are captured and cleaned up.
731 const model::test_program_ptr program = model::test_program_builder(
732 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
733 .add_test_case("print_params").build_ptr();
735 config::tree user_config = engine::empty_config();
736 user_config.set_string("test_suites.the-suite.one", "first variable");
737 user_config.set_string("test_suites.the-suite.two", "second variable");
739 scheduler::scheduler_handle handle = scheduler::setup();
741 const scheduler::exec_handle exec_handle = handle.spawn_test(
742 program, "print_params", user_config);
744 scheduler::result_handle_ptr result_handle = handle.wait_any();
745 const scheduler::test_result_handle* test_result_handle =
746 dynamic_cast< const scheduler::test_result_handle* >(
747 result_handle.get());
749 ATF_REQUIRE_EQ(exec_handle, result_handle->original_pid());
750 ATF_REQUIRE_EQ(program, test_result_handle->test_program());
751 ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name());
752 ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
753 test_result_handle->test_result());
// Captured stdout must match exec_print_params' output verbatim.
755 const fs::path stdout_file = result_handle->stdout_file();
756 ATF_REQUIRE(atf::utils::compare_file(
758 "Test program: the-program\n"
759 "Test case: print_params\n"
760 "one=first variable\n"
761 "two=second variable\n"));
762 const fs::path stderr_file = result_handle->stderr_file();
763 ATF_REQUIRE(atf::utils::compare_file(
764 stderr_file.str(), "stderr: print_params\n"));
// cleanup() must delete the captured output files.
766 result_handle->cleanup();
767 ATF_REQUIRE(!fs::exists(stdout_file));
768 ATF_REQUIRE(!fs::exists(stderr_file));
769 result_handle.reset();
775 ATF_TEST_CASE_WITHOUT_HEAD(integration__fake_result);
776 ATF_TEST_CASE_BODY(integration__fake_result)
// A test case carrying a precomputed ("fake") result must be reported
// as-is by the scheduler without actually executing anything.
778 const model::test_result fake_result(model::test_result_skipped,
779 "Some fake details");
781 model::test_cases_map test_cases;
782 test_cases.insert(model::test_cases_map::value_type(
783 "__fake__", model::test_case("__fake__", "ABC", fake_result)));
785 const model::test_program_ptr program(new model::test_program(
786 "mock", fs::path("the-program"), fs::current_path(), "the-suite",
787 model::metadata_builder().build(), test_cases));
789 const config::tree user_config = engine::empty_config();
791 scheduler::scheduler_handle handle = scheduler::setup();
793 (void)handle.spawn_test(program, "__fake__", user_config);
795 scheduler::result_handle_ptr result_handle = handle.wait_any();
796 const scheduler::test_result_handle* test_result_handle =
797 dynamic_cast< const scheduler::test_result_handle* >(
798 result_handle.get());
799 ATF_REQUIRE_EQ(fake_result, test_result_handle->test_result());
800 result_handle->cleanup();
801 result_handle.reset();
807 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__head_skips);
808 ATF_TEST_CASE_BODY(integration__cleanup__head_skips)
// When a requirements check skips the test before its body runs, the
// cleanup routine must NOT be invoked even if has_cleanup is set.
810 const model::test_program_ptr program = model::test_program_builder(
811 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
812 .add_test_case("skip_me",
813 model::metadata_builder()
814 .add_required_config("variable-that-does-not-exist")
815 .set_has_cleanup(true)
819 const config::tree user_config = engine::empty_config();
821 scheduler::scheduler_handle handle = scheduler::setup();
823 (void)handle.spawn_test(program, "skip_me", user_config);
825 scheduler::result_handle_ptr result_handle = handle.wait_any();
826 const scheduler::test_result_handle* test_result_handle =
827 dynamic_cast< const scheduler::test_result_handle* >(
828 result_handle.get());
829 ATF_REQUIRE_EQ(model::test_result(
830 model::test_result_skipped,
831 "Required configuration property "
832 "'variable-that-does-not-exist' not defined"),
833 test_result_handle->test_result());
// Absence of the exec_cleanup marker proves cleanup never ran.
834 ATF_REQUIRE(!atf::utils::grep_file("exec_cleanup was called",
835 result_handle->stdout_file().str()));
836 result_handle->cleanup();
837 result_handle.reset();
843 /// Runs a test to verify the behavior of cleanup routines.
845 /// \param test_case The name of the test case to invoke.
846 /// \param exp_result The expected test result of the execution.
848 do_cleanup_test(const char* test_case,
849 const model::test_result& exp_result)
// Shared driver for the integration__cleanup__* cases: runs one mock
// test with has_cleanup set and checks both the result and that the
// cleanup marker appeared exactly as expected on stdout.
851 const model::test_program_ptr program = model::test_program_builder(
852 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
853 .add_test_case(test_case)
854 .set_metadata(model::metadata_builder().set_has_cleanup(true).build())
857 const config::tree user_config = engine::empty_config();
859 scheduler::scheduler_handle handle = scheduler::setup();
861 (void)handle.spawn_test(program, test_case, user_config);
863 scheduler::result_handle_ptr result_handle = handle.wait_any();
864 const scheduler::test_result_handle* test_result_handle =
865 dynamic_cast< const scheduler::test_result_handle* >(
866 result_handle.get());
867 ATF_REQUIRE_EQ(exp_result, test_result_handle->test_result());
868 ATF_REQUIRE(atf::utils::compare_file(
869 result_handle->stdout_file().str(),
870 "exec_cleanup was called\n"));
871 result_handle->cleanup();
872 result_handle.reset();
878 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_skips);
879 ATF_TEST_CASE_BODY(integration__cleanup__body_skips)
// A body that skips still gets its cleanup run; the result stays skipped.
882 "skip_body_pass_cleanup",
883 model::test_result(model::test_result_skipped, "Exit 0"));
887 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_ok);
888 ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_ok)
// A passing cleanup must not mask the body's failure (SIGTERM death).
891 "fail_body_pass_cleanup",
892 model::test_result(model::test_result_failed, "Signal 15"));
896 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_ok__cleanup_bad);
897 ATF_TEST_CASE_BODY(integration__cleanup__body_ok__cleanup_bad)
// A failing cleanup downgrades an otherwise-passing test to broken.
900 "pass_body_fail_cleanup",
901 model::test_result(model::test_result_broken, "Test case cleanup "
902 "did not terminate successfully"));
906 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__body_bad__cleanup_bad);
907 ATF_TEST_CASE_BODY(integration__cleanup__body_bad__cleanup_bad)
// When both body and cleanup fail, the body's failure takes precedence.
910 "fail_body_fail_cleanup",
911 model::test_result(model::test_result_failed, "Signal 15"));
915 ATF_TEST_CASE_WITHOUT_HEAD(integration__cleanup__timeout);
916 ATF_TEST_CASE_BODY(integration__cleanup__timeout)
// Shrink the global cleanup deadline so the stalling cleanup is killed
// quickly and the test is reported as broken.
918 scheduler::cleanup_timeout = datetime::delta(1, 0);
921 model::test_result(model::test_result_broken, "Test case cleanup "
926 ATF_TEST_CASE_WITHOUT_HEAD(integration__check_requirements);
927 ATF_TEST_CASE_BODY(integration__check_requirements)
// An unmet required_config must skip the test before the body ("exit 12")
// ever runs, so the result is skipped rather than passed.
929 const model::test_program_ptr program = model::test_program_builder(
930 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
931 .add_test_case("exit 12")
932 .set_metadata(model::metadata_builder()
933 .add_required_config("abcde").build())
936 const config::tree user_config = engine::empty_config();
938 scheduler::scheduler_handle handle = scheduler::setup();
940 (void)handle.spawn_test(program, "exit 12", user_config);
942 scheduler::result_handle_ptr result_handle = handle.wait_any();
943 const scheduler::test_result_handle* test_result_handle =
944 dynamic_cast< const scheduler::test_result_handle* >(
945 result_handle.get());
946 ATF_REQUIRE_EQ(model::test_result(
947 model::test_result_skipped,
948 "Required configuration property 'abcde' not defined"),
949 test_result_handle->test_result());
950 result_handle->cleanup();
951 result_handle.reset();
957 ATF_TEST_CASE_WITHOUT_HEAD(integration__stacktrace);
958 ATF_TEST_CASE_BODY(integration__stacktrace)
// Skips/configures the test host for core dumps as needed.
960 utils::prepare_coredump_test(this);
// An unknown test case name makes the mock abort (see exec_test); the
// scheduler should then try to gather a stack trace on stderr only.
962 const model::test_program_ptr program = model::test_program_builder(
963 "mock", fs::path("the-program"), fs::current_path(), "the-suite")
964 .add_test_case("unknown-dumps-core").build_ptr();
966 const config::tree user_config = engine::empty_config();
968 scheduler::scheduler_handle handle = scheduler::setup();
970 (void)handle.spawn_test(program, "unknown-dumps-core", user_config);
972 scheduler::result_handle_ptr result_handle = handle.wait_any();
973 const scheduler::test_result_handle* test_result_handle =
974 dynamic_cast< const scheduler::test_result_handle* >(
975 result_handle.get());
976 ATF_REQUIRE_EQ(model::test_result(model::test_result_failed,
977 F("Signal %s") % SIGABRT),
978 test_result_handle->test_result());
// The stack trace notice must land on stderr, not stdout.
979 ATF_REQUIRE(!atf::utils::grep_file("attempting to gather stack trace",
980 result_handle->stdout_file().str()));
981 ATF_REQUIRE( atf::utils::grep_file("attempting to gather stack trace",
982 result_handle->stderr_file().str()));
983 result_handle->cleanup();
984 result_handle.reset();
/// Runs a test to verify the dumping of the list of existing files on failure.
///
/// Spawns the given mock test case with an empty configuration, waits for it
/// and compares the captured stdout/stderr against the expectations.
///
/// \param test_case The name of the test case to invoke.
/// \param exp_stderr Expected contents of stderr.
do_check_list_files_on_failure(const char* test_case, const char* exp_stderr)
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case(test_case).build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, test_case, user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    // Echo the child's captured output into our own log to ease debugging,
    // then validate it.
    atf::utils::cat_file(result_handle->stdout_file().str(), "child stdout: ");
    ATF_REQUIRE(atf::utils::compare_file(result_handle->stdout_file().str(),
    // NOTE(review): the expected-stdout argument line of the call above is
    // not visible in this chunk.
    atf::utils::cat_file(result_handle->stderr_file().str(), "child stderr: ");
    ATF_REQUIRE(atf::utils::compare_file(result_handle->stderr_file().str(),
    // NOTE(review): the exp_stderr argument line of the call above is not
    // visible in this chunk.
    result_handle->cleanup();
    result_handle.reset();
1021 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__none);
1022 ATF_TEST_CASE_BODY(integration__list_files_on_failure__none)
1024 do_check_list_files_on_failure("fail", "This should not be clobbered\n");
1028 ATF_TEST_CASE_WITHOUT_HEAD(integration__list_files_on_failure__some);
1029 ATF_TEST_CASE_BODY(integration__list_files_on_failure__some)
1031 do_check_list_files_on_failure(
1032 "create_files_and_fail",
1033 "This should not be clobbered\n"
1034 "Files left in work directory after failure: "
1035 "dir1, first file, second-file\n");
ATF_TEST_CASE_WITHOUT_HEAD(integration__prevent_clobbering_control_files);
ATF_TEST_CASE_BODY(integration__prevent_clobbering_control_files)
    // Run the 'delete_all' mock test case, which presumably tries to remove
    // everything reachable from its work directory -- confirm against the
    // mock interface handler for that name.
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("delete_all").build_ptr();

    const config::tree user_config = engine::empty_config();

    scheduler::scheduler_handle handle = scheduler::setup();

    (void)handle.spawn_test(program, "delete_all", user_config);

    scheduler::result_handle_ptr result_handle = handle.wait_any();
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());
    // If the scheduler's own control files had been clobbered by the test,
    // result collection could not yield this clean pass.
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());
    result_handle->cleanup();
    result_handle.reset();
ATF_TEST_CASE_WITHOUT_HEAD(debug_test);
ATF_TEST_CASE_BODY(debug_test)
    const model::test_program_ptr program = model::test_program_builder(
        "mock", fs::path("the-program"), fs::current_path(), "the-suite")
        .add_test_case("print_params").build_ptr();

    // Two per-suite variables that the 'print_params' mock case echoes back.
    config::tree user_config = engine::empty_config();
    user_config.set_string("test_suites.the-suite.one", "first variable");
    user_config.set_string("test_suites.the-suite.two", "second variable");

    scheduler::scheduler_handle handle = scheduler::setup();

    // debug_test() additionally mirrors the test's output into these
    // caller-provided files.
    const fs::path stdout_file("custom-stdout.txt");
    const fs::path stderr_file("custom-stderr.txt");

    scheduler::result_handle_ptr result_handle = handle.debug_test(
        program, "print_params", user_config, stdout_file, stderr_file);
    const scheduler::test_result_handle* test_result_handle =
        dynamic_cast< const scheduler::test_result_handle* >(
            result_handle.get());

    ATF_REQUIRE_EQ(program, test_result_handle->test_program());
    ATF_REQUIRE_EQ("print_params", test_result_handle->test_case_name());
    ATF_REQUIRE_EQ(model::test_result(model::test_result_passed, "Exit 0"),
                   test_result_handle->test_result());

    // The original output went to a file.  It's only an artifact of
    // debug_test() that we later get a copy in our own files.
    ATF_REQUIRE(stdout_file != result_handle->stdout_file());
    ATF_REQUIRE(stderr_file != result_handle->stderr_file());

    result_handle->cleanup();
    result_handle.reset();

    // The mirrored files must contain the configuration variables echoed by
    // 'print_params' plus its stderr marker.
    ATF_REQUIRE(atf::utils::compare_file(
    // NOTE(review): the first argument line of the call above (the stdout
    // file path) is not visible in this chunk.
        "Test program: the-program\n"
        "Test case: print_params\n"
        "one=first variable\n"
        "two=second variable\n"));
    ATF_REQUIRE(atf::utils::compare_file(
        stderr_file.str(), "stderr: print_params\n"));
1113 ATF_TEST_CASE_WITHOUT_HEAD(ensure_valid_interface);
1114 ATF_TEST_CASE_BODY(ensure_valid_interface)
1116 scheduler::ensure_valid_interface("mock");
1118 ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'mock2'",
1119 scheduler::ensure_valid_interface("mock2"));
1120 scheduler::register_interface(
1121 "mock2", std::shared_ptr< scheduler::interface >(new mock_interface()));
1122 scheduler::ensure_valid_interface("mock2");
1124 // Standard interfaces should not be present unless registered.
1125 ATF_REQUIRE_THROW_RE(engine::error, "Unsupported test interface 'plain'",
1126 scheduler::ensure_valid_interface("plain"));
1130 ATF_TEST_CASE_WITHOUT_HEAD(registered_interface_names);
1131 ATF_TEST_CASE_BODY(registered_interface_names)
1133 std::set< std::string > exp_names;
1135 exp_names.insert("mock");
1136 ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());
1138 scheduler::register_interface(
1139 "mock2", std::shared_ptr< scheduler::interface >(new mock_interface()));
1140 exp_names.insert("mock2");
1141 ATF_REQUIRE_EQ(exp_names, scheduler::registered_interface_names());
1145 ATF_TEST_CASE_WITHOUT_HEAD(current_context);
1146 ATF_TEST_CASE_BODY(current_context)
1148 const model::context context = scheduler::current_context();
1149 ATF_REQUIRE_EQ(fs::current_path(), context.cwd());
1150 ATF_REQUIRE(utils::getallenv() == context.env());
1154 ATF_TEST_CASE_WITHOUT_HEAD(generate_config__empty);
1155 ATF_TEST_CASE_BODY(generate_config__empty)
1157 const config::tree user_config = engine::empty_config();
1159 const config::properties_map exp_props;
1161 ATF_REQUIRE_EQ(exp_props,
1162 scheduler::generate_config(user_config, "missing"));
1166 ATF_TEST_CASE_WITHOUT_HEAD(generate_config__no_matches);
1167 ATF_TEST_CASE_BODY(generate_config__no_matches)
1169 config::tree user_config = engine::empty_config();
1170 user_config.set_string("architecture", "foo");
1171 user_config.set_string("test_suites.one.var1", "value 1");
1173 const config::properties_map exp_props;
1175 ATF_REQUIRE_EQ(exp_props,
1176 scheduler::generate_config(user_config, "two"));
1180 ATF_TEST_CASE_WITHOUT_HEAD(generate_config__some_matches);
1181 ATF_TEST_CASE_BODY(generate_config__some_matches)
1183 std::vector< passwd::user > mock_users;
1184 mock_users.push_back(passwd::user("nobody", 1234, 5678));
1185 passwd::set_mock_users_for_testing(mock_users);
1187 config::tree user_config = engine::empty_config();
1188 user_config.set_string("architecture", "foo");
1189 user_config.set_string("unprivileged_user", "nobody");
1190 user_config.set_string("test_suites.one.var1", "value 1");
1191 user_config.set_string("test_suites.two.var2", "value 2");
1193 config::properties_map exp_props;
1194 exp_props["unprivileged-user"] = "nobody";
1195 exp_props["var1"] = "value 1";
1197 ATF_REQUIRE_EQ(exp_props,
1198 scheduler::generate_config(user_config, "one"));
1202 ATF_INIT_TEST_CASES(tcs)
1204 scheduler::register_interface(
1205 "mock", std::shared_ptr< scheduler::interface >(new mock_interface()));
1207 ATF_ADD_TEST_CASE(tcs, integration__list_some);
1208 ATF_ADD_TEST_CASE(tcs, integration__list_check_paths);
1209 ATF_ADD_TEST_CASE(tcs, integration__list_timeout);
1210 ATF_ADD_TEST_CASE(tcs, integration__list_fail);
1211 ATF_ADD_TEST_CASE(tcs, integration__list_empty);
1213 ATF_ADD_TEST_CASE(tcs, integration__run_one);
1214 ATF_ADD_TEST_CASE(tcs, integration__run_many);
1216 ATF_ADD_TEST_CASE(tcs, integration__run_check_paths);
1217 ATF_ADD_TEST_CASE(tcs, integration__parameters_and_output);
1219 ATF_ADD_TEST_CASE(tcs, integration__fake_result);
1220 ATF_ADD_TEST_CASE(tcs, integration__cleanup__head_skips);
1221 ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_skips);
1222 ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_ok__cleanup_bad);
1223 ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_ok);
1224 ATF_ADD_TEST_CASE(tcs, integration__cleanup__body_bad__cleanup_bad);
1225 ATF_ADD_TEST_CASE(tcs, integration__cleanup__timeout);
1226 ATF_ADD_TEST_CASE(tcs, integration__check_requirements);
1227 ATF_ADD_TEST_CASE(tcs, integration__stacktrace);
1228 ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__none);
1229 ATF_ADD_TEST_CASE(tcs, integration__list_files_on_failure__some);
1230 ATF_ADD_TEST_CASE(tcs, integration__prevent_clobbering_control_files);
1232 ATF_ADD_TEST_CASE(tcs, debug_test);
1234 ATF_ADD_TEST_CASE(tcs, ensure_valid_interface);
1235 ATF_ADD_TEST_CASE(tcs, registered_interface_names);
1237 ATF_ADD_TEST_CASE(tcs, current_context);
1239 ATF_ADD_TEST_CASE(tcs, generate_config__empty);
1240 ATF_ADD_TEST_CASE(tcs, generate_config__no_matches);
1241 ATF_ADD_TEST_CASE(tcs, generate_config__some_matches);